{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9840490797546013,
  "eval_steps": 500,
  "global_step": 814,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.317, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 6.000000000000001e-08, |
|
"loss": 1.1248, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.2000000000000002e-07, |
|
"loss": 1.0845, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.8e-07, |
|
"loss": 1.0864, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.4000000000000003e-07, |
|
"loss": 1.0715, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.0000000000000004e-07, |
|
"loss": 1.0756, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6e-07, |
|
"loss": 0.9961, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2000000000000006e-07, |
|
"loss": 0.9972, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.800000000000001e-07, |
|
"loss": 1.0152, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.4e-07, |
|
"loss": 1.0149, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.0061, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 6.6e-07, |
|
"loss": 0.9948, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 7.2e-07, |
|
"loss": 0.9787, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 7.8e-07, |
|
"loss": 0.9936, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.400000000000001e-07, |
|
"loss": 0.9496, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9e-07, |
|
"loss": 0.9368, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.600000000000001e-07, |
|
"loss": 0.9234, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.0200000000000002e-06, |
|
"loss": 0.9324, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.08e-06, |
|
"loss": 0.9183, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.14e-06, |
|
"loss": 0.9273, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 0.9165, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.26e-06, |
|
"loss": 0.9161, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.32e-06, |
|
"loss": 0.8887, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.3800000000000001e-06, |
|
"loss": 0.9093, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.44e-06, |
|
"loss": 0.8891, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.5e-06, |
|
"loss": 0.8893, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.56e-06, |
|
"loss": 0.8838, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.6200000000000002e-06, |
|
"loss": 0.8701, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.6800000000000002e-06, |
|
"loss": 0.8748, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.7399999999999999e-06, |
|
"loss": 0.8831, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.8e-06, |
|
"loss": 0.8656, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.86e-06, |
|
"loss": 0.8655, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9200000000000003e-06, |
|
"loss": 0.8681, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.98e-06, |
|
"loss": 0.8682, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.0400000000000004e-06, |
|
"loss": 0.8618, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.1e-06, |
|
"loss": 0.874, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.16e-06, |
|
"loss": 0.8305, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.22e-06, |
|
"loss": 0.8392, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.28e-06, |
|
"loss": 0.8441, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.34e-06, |
|
"loss": 0.829, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 0.8422, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.4599999999999997e-06, |
|
"loss": 0.8295, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.52e-06, |
|
"loss": 0.8349, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.58e-06, |
|
"loss": 0.8204, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.64e-06, |
|
"loss": 0.8344, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.7e-06, |
|
"loss": 0.8121, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.7600000000000003e-06, |
|
"loss": 0.817, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.82e-06, |
|
"loss": 0.8095, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.88e-06, |
|
"loss": 0.7887, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.9400000000000002e-06, |
|
"loss": 0.7916, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3e-06, |
|
"loss": 0.8136, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.06e-06, |
|
"loss": 0.8269, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.12e-06, |
|
"loss": 0.788, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.18e-06, |
|
"loss": 0.8035, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.2400000000000003e-06, |
|
"loss": 0.8079, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.3e-06, |
|
"loss": 0.8097, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.3600000000000004e-06, |
|
"loss": 0.825, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.42e-06, |
|
"loss": 0.8157, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.4799999999999997e-06, |
|
"loss": 0.8158, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.54e-06, |
|
"loss": 0.8033, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.6e-06, |
|
"loss": 0.7999, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.66e-06, |
|
"loss": 0.7915, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.72e-06, |
|
"loss": 0.8036, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.7800000000000002e-06, |
|
"loss": 0.7854, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.8400000000000005e-06, |
|
"loss": 0.7872, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.9e-06, |
|
"loss": 0.8166, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.96e-06, |
|
"loss": 0.7972, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.0200000000000005e-06, |
|
"loss": 0.7959, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.080000000000001e-06, |
|
"loss": 0.7973, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.14e-06, |
|
"loss": 0.7606, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.2e-06, |
|
"loss": 0.7859, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.26e-06, |
|
"loss": 0.798, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.32e-06, |
|
"loss": 0.7739, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.38e-06, |
|
"loss": 0.7898, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.44e-06, |
|
"loss": 0.7995, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.5e-06, |
|
"loss": 0.7688, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.56e-06, |
|
"loss": 0.7937, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.62e-06, |
|
"loss": 0.7708, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.68e-06, |
|
"loss": 0.7963, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.74e-06, |
|
"loss": 0.7817, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 0.752, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.86e-06, |
|
"loss": 0.7891, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.9199999999999995e-06, |
|
"loss": 0.7742, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.98e-06, |
|
"loss": 0.7814, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 5.04e-06, |
|
"loss": 0.7851, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 5.1e-06, |
|
"loss": 0.7643, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 5.16e-06, |
|
"loss": 0.7853, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.22e-06, |
|
"loss": 0.7654, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.28e-06, |
|
"loss": 0.7698, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.3400000000000005e-06, |
|
"loss": 0.764, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.4e-06, |
|
"loss": 0.7493, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.46e-06, |
|
"loss": 0.7841, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.5200000000000005e-06, |
|
"loss": 0.7774, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.580000000000001e-06, |
|
"loss": 0.7793, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.64e-06, |
|
"loss": 0.7687, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.7e-06, |
|
"loss": 0.7697, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.76e-06, |
|
"loss": 0.7685, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.82e-06, |
|
"loss": 0.7714, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.8800000000000005e-06, |
|
"loss": 0.7744, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.94e-06, |
|
"loss": 0.7819, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 6e-06, |
|
"loss": 0.7874, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.991596638655462e-06, |
|
"loss": 0.7769, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.9831932773109244e-06, |
|
"loss": 0.7852, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 5.9747899159663866e-06, |
|
"loss": 0.7852, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 5.966386554621849e-06, |
|
"loss": 0.7518, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 5.957983193277311e-06, |
|
"loss": 0.7671, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 5.949579831932774e-06, |
|
"loss": 0.7748, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 5.941176470588236e-06, |
|
"loss": 0.7515, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 5.932773109243698e-06, |
|
"loss": 0.7456, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 5.92436974789916e-06, |
|
"loss": 0.7801, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 5.915966386554622e-06, |
|
"loss": 0.755, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 5.9075630252100845e-06, |
|
"loss": 0.7659, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 5.899159663865546e-06, |
|
"loss": 0.7668, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 5.890756302521008e-06, |
|
"loss": 0.7369, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 5.88235294117647e-06, |
|
"loss": 0.7754, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 5.873949579831933e-06, |
|
"loss": 0.7463, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.865546218487395e-06, |
|
"loss": 0.7554, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.857142857142857e-06, |
|
"loss": 0.7728, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.8487394957983195e-06, |
|
"loss": 0.7504, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.840336134453782e-06, |
|
"loss": 0.7919, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.831932773109244e-06, |
|
"loss": 0.7601, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.823529411764706e-06, |
|
"loss": 0.7127, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.815126050420168e-06, |
|
"loss": 0.7609, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.80672268907563e-06, |
|
"loss": 0.7695, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.798319327731092e-06, |
|
"loss": 0.7576, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.7899159663865544e-06, |
|
"loss": 0.7576, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.781512605042017e-06, |
|
"loss": 0.7702, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.7731092436974796e-06, |
|
"loss": 0.7298, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.764705882352942e-06, |
|
"loss": 0.7614, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.756302521008404e-06, |
|
"loss": 0.7551, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.747899159663866e-06, |
|
"loss": 0.7866, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.739495798319328e-06, |
|
"loss": 0.7705, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.73109243697479e-06, |
|
"loss": 0.765, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.722689075630252e-06, |
|
"loss": 0.765, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.7142857142857145e-06, |
|
"loss": 0.7346, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.705882352941177e-06, |
|
"loss": 0.771, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.697478991596639e-06, |
|
"loss": 0.7689, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.689075630252101e-06, |
|
"loss": 0.7632, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.680672268907563e-06, |
|
"loss": 0.7282, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.672268907563025e-06, |
|
"loss": 0.7497, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 5.663865546218487e-06, |
|
"loss": 0.7766, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 5.6554621848739495e-06, |
|
"loss": 0.7348, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 5.647058823529412e-06, |
|
"loss": 0.776, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 5.638655462184874e-06, |
|
"loss": 0.7627, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 5.630252100840336e-06, |
|
"loss": 0.7495, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 5.621848739495799e-06, |
|
"loss": 0.7636, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 5.613445378151261e-06, |
|
"loss": 0.778, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 5.605042016806723e-06, |
|
"loss": 0.7537, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 5.596638655462185e-06, |
|
"loss": 0.7634, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 5.588235294117647e-06, |
|
"loss": 0.7456, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 5.5798319327731096e-06, |
|
"loss": 0.7694, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 5.571428571428572e-06, |
|
"loss": 0.7776, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 5.563025210084034e-06, |
|
"loss": 0.7586, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 5.554621848739496e-06, |
|
"loss": 0.726, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 5.546218487394958e-06, |
|
"loss": 0.7561, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 5.53781512605042e-06, |
|
"loss": 0.7481, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 5.529411764705882e-06, |
|
"loss": 0.7465, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 5.5210084033613445e-06, |
|
"loss": 0.7681, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 5.512605042016807e-06, |
|
"loss": 0.7355, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 5.504201680672269e-06, |
|
"loss": 0.7753, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.495798319327731e-06, |
|
"loss": 0.7721, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.487394957983193e-06, |
|
"loss": 0.7413, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.478991596638655e-06, |
|
"loss": 0.7471, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.470588235294117e-06, |
|
"loss": 0.7569, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.4621848739495795e-06, |
|
"loss": 0.7632, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.4537815126050425e-06, |
|
"loss": 0.7278, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.445378151260505e-06, |
|
"loss": 0.7726, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.436974789915967e-06, |
|
"loss": 0.7256, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.428571428571429e-06, |
|
"loss": 0.7721, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.420168067226891e-06, |
|
"loss": 0.7643, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.411764705882353e-06, |
|
"loss": 0.7544, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.403361344537815e-06, |
|
"loss": 0.7246, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.394957983193277e-06, |
|
"loss": 0.7754, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.3865546218487396e-06, |
|
"loss": 0.7644, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.378151260504202e-06, |
|
"loss": 0.7507, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.369747899159664e-06, |
|
"loss": 0.741, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.361344537815127e-06, |
|
"loss": 0.72, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.352941176470589e-06, |
|
"loss": 0.75, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.344537815126051e-06, |
|
"loss": 0.7542, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.336134453781512e-06, |
|
"loss": 0.7606, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.3277310924369745e-06, |
|
"loss": 0.7525, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.319327731092437e-06, |
|
"loss": 0.7413, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.310924369747899e-06, |
|
"loss": 0.7422, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.302521008403361e-06, |
|
"loss": 0.7243, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.294117647058824e-06, |
|
"loss": 0.7377, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.285714285714286e-06, |
|
"loss": 0.7262, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.277310924369748e-06, |
|
"loss": 0.7349, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.26890756302521e-06, |
|
"loss": 0.7409, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.2605042016806725e-06, |
|
"loss": 0.7494, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.252100840336135e-06, |
|
"loss": 0.7457, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.243697478991597e-06, |
|
"loss": 0.7596, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.235294117647059e-06, |
|
"loss": 0.726, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.226890756302521e-06, |
|
"loss": 0.7229, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.218487394957983e-06, |
|
"loss": 0.7744, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.210084033613445e-06, |
|
"loss": 0.7112, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.201680672268908e-06, |
|
"loss": 0.7394, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.19327731092437e-06, |
|
"loss": 0.7422, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.1848739495798325e-06, |
|
"loss": 0.7396, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.176470588235295e-06, |
|
"loss": 0.7332, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.168067226890757e-06, |
|
"loss": 0.7368, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.159663865546219e-06, |
|
"loss": 0.7457, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.15126050420168e-06, |
|
"loss": 0.734, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.142857142857142e-06, |
|
"loss": 0.7245, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.1344537815126045e-06, |
|
"loss": 0.7477, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.1260504201680675e-06, |
|
"loss": 0.7553, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.11764705882353e-06, |
|
"loss": 0.7347, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.109243697478992e-06, |
|
"loss": 0.7378, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.100840336134454e-06, |
|
"loss": 0.7463, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.092436974789916e-06, |
|
"loss": 0.7439, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 5.084033613445378e-06, |
|
"loss": 0.7549, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 5.07563025210084e-06, |
|
"loss": 0.7304, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 5.0672268907563025e-06, |
|
"loss": 0.7288, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 5.058823529411765e-06, |
|
"loss": 0.7473, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.050420168067227e-06, |
|
"loss": 0.7537, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.042016806722689e-06, |
|
"loss": 0.7504, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.033613445378152e-06, |
|
"loss": 0.7619, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.025210084033614e-06, |
|
"loss": 0.7302, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.016806722689076e-06, |
|
"loss": 0.7483, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5.008403361344538e-06, |
|
"loss": 0.7385, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5e-06, |
|
"loss": 0.7463, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.9915966386554625e-06, |
|
"loss": 0.7225, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.983193277310925e-06, |
|
"loss": 0.7325, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.974789915966387e-06, |
|
"loss": 0.7461, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.966386554621849e-06, |
|
"loss": 0.72, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.957983193277311e-06, |
|
"loss": 0.7513, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.949579831932773e-06, |
|
"loss": 0.7179, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.941176470588235e-06, |
|
"loss": 0.7451, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.9327731092436975e-06, |
|
"loss": 0.7402, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.92436974789916e-06, |
|
"loss": 0.758, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.915966386554622e-06, |
|
"loss": 0.7291, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.907563025210084e-06, |
|
"loss": 0.7653, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.899159663865546e-06, |
|
"loss": 0.7223, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.890756302521008e-06, |
|
"loss": 0.7531, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.88235294117647e-06, |
|
"loss": 0.7324, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.873949579831933e-06, |
|
"loss": 0.7454, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.8655462184873955e-06, |
|
"loss": 0.7254, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.857142857142858e-06, |
|
"loss": 0.7589, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.84873949579832e-06, |
|
"loss": 0.7408, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.840336134453782e-06, |
|
"loss": 0.7279, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.831932773109244e-06, |
|
"loss": 0.7654, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.823529411764706e-06, |
|
"loss": 0.732, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.815126050420168e-06, |
|
"loss": 0.7226, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.80672268907563e-06, |
|
"loss": 0.7326, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.7983193277310925e-06, |
|
"loss": 0.7423, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.789915966386555e-06, |
|
"loss": 0.7442, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.781512605042018e-06, |
|
"loss": 0.7259, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.773109243697479e-06, |
|
"loss": 0.7485, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.764705882352941e-06, |
|
"loss": 0.7196, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.756302521008403e-06, |
|
"loss": 0.7476, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.747899159663865e-06, |
|
"loss": 0.7238, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.7394957983193275e-06, |
|
"loss": 0.7322, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.73109243697479e-06, |
|
"loss": 0.7438, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.722689075630252e-06, |
|
"loss": 0.7517, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.714285714285714e-06, |
|
"loss": 0.7327, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.705882352941177e-06, |
|
"loss": 0.7248, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.697478991596639e-06, |
|
"loss": 0.7339, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.689075630252101e-06, |
|
"loss": 0.7338, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.680672268907563e-06, |
|
"loss": 0.7384, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.6722689075630255e-06, |
|
"loss": 0.7279, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.663865546218488e-06, |
|
"loss": 0.7456, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.65546218487395e-06, |
|
"loss": 0.7599, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.647058823529412e-06, |
|
"loss": 0.7286, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.638655462184874e-06, |
|
"loss": 0.7471, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.630252100840336e-06, |
|
"loss": 0.7443, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.621848739495798e-06, |
|
"loss": 0.7352, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.613445378151261e-06, |
|
"loss": 0.7536, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.605042016806723e-06, |
|
"loss": 0.7499, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.5966386554621855e-06, |
|
"loss": 0.7519, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.588235294117647e-06, |
|
"loss": 0.7293, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.579831932773109e-06, |
|
"loss": 0.7201, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.571428571428571e-06, |
|
"loss": 0.7494, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.563025210084033e-06, |
|
"loss": 0.7146, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.554621848739495e-06, |
|
"loss": 0.7421, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.5462184873949575e-06, |
|
"loss": 0.71, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.5378151260504205e-06, |
|
"loss": 0.7479, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.529411764705883e-06, |
|
"loss": 0.7583, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.521008403361345e-06, |
|
"loss": 0.7293, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.512605042016807e-06, |
|
"loss": 0.7295, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.504201680672269e-06, |
|
"loss": 0.7468, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.495798319327731e-06, |
|
"loss": 0.744, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.487394957983193e-06, |
|
"loss": 0.7154, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.4789915966386555e-06, |
|
"loss": 0.7305, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.470588235294118e-06, |
|
"loss": 0.736, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.46218487394958e-06, |
|
"loss": 0.7375, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.453781512605043e-06, |
|
"loss": 0.7513, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.445378151260505e-06, |
|
"loss": 0.7035, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.436974789915967e-06, |
|
"loss": 0.7273, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.428571428571429e-06, |
|
"loss": 0.7304, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.420168067226891e-06, |
|
"loss": 0.7278, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.411764705882353e-06, |
|
"loss": 0.7299, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.403361344537815e-06, |
|
"loss": 0.7275, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.394957983193277e-06, |
|
"loss": 0.7232, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.386554621848739e-06, |
|
"loss": 0.697, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.378151260504202e-06, |
|
"loss": 0.7324, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.369747899159664e-06, |
|
"loss": 0.733, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.361344537815126e-06, |
|
"loss": 0.7218, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.352941176470588e-06, |
|
"loss": 0.6961, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.3445378151260505e-06, |
|
"loss": 0.7239, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.336134453781513e-06, |
|
"loss": 0.7167, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.327731092436975e-06, |
|
"loss": 0.7439, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.319327731092437e-06, |
|
"loss": 0.7214, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.310924369747899e-06, |
|
"loss": 0.7495, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.302521008403361e-06, |
|
"loss": 0.728, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.294117647058823e-06, |
|
"loss": 0.7445, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.285714285714286e-06, |
|
"loss": 0.7366, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.2773109243697484e-06, |
|
"loss": 0.7275, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.268907563025211e-06, |
|
"loss": 0.7455, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.260504201680673e-06, |
|
"loss": 0.7443, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.252100840336135e-06, |
|
"loss": 0.7016, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.243697478991597e-06, |
|
"loss": 0.7468, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.235294117647059e-06, |
|
"loss": 0.7084, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.226890756302521e-06, |
|
"loss": 0.7252, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.2184873949579826e-06, |
|
"loss": 0.7321, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.2100840336134455e-06, |
|
"loss": 0.7194, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.201680672268908e-06, |
|
"loss": 0.7089, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.19327731092437e-06, |
|
"loss": 0.731, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.184873949579832e-06, |
|
"loss": 0.7354, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.176470588235294e-06, |
|
"loss": 0.7188, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.168067226890756e-06, |
|
"loss": 0.7285, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.159663865546218e-06, |
|
"loss": 0.7187, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.1512605042016805e-06, |
|
"loss": 0.7402, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.142857142857143e-06, |
|
"loss": 0.7194, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.134453781512605e-06, |
|
"loss": 0.7422, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.126050420168067e-06, |
|
"loss": 0.7457, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.11764705882353e-06, |
|
"loss": 0.7158, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.109243697478992e-06, |
|
"loss": 0.7252, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.100840336134454e-06, |
|
"loss": 0.7123, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.092436974789916e-06, |
|
"loss": 0.7281, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.0840336134453784e-06, |
|
"loss": 0.7347, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.075630252100841e-06, |
|
"loss": 0.7285, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.067226890756303e-06, |
|
"loss": 0.7389, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.058823529411765e-06, |
|
"loss": 0.7376, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.050420168067227e-06, |
|
"loss": 0.7392, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.042016806722689e-06, |
|
"loss": 0.7471, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.033613445378151e-06, |
|
"loss": 0.737, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.025210084033613e-06, |
|
"loss": 0.7264, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.0168067226890755e-06, |
|
"loss": 0.7135, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.008403361344538e-06, |
|
"loss": 0.7076, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4e-06, |
|
"loss": 0.7435, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.991596638655462e-06, |
|
"loss": 0.7291, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.983193277310924e-06, |
|
"loss": 0.7362, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.974789915966386e-06, |
|
"loss": 0.7216, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.966386554621848e-06, |
|
"loss": 0.7304, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.957983193277311e-06, |
|
"loss": 0.7347, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.9495798319327735e-06, |
|
"loss": 0.725, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.941176470588236e-06, |
|
"loss": 0.735, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.932773109243698e-06, |
|
"loss": 0.6993, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.92436974789916e-06, |
|
"loss": 0.7304, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.915966386554622e-06, |
|
"loss": 0.7319, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.907563025210084e-06, |
|
"loss": 0.7302, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.899159663865546e-06, |
|
"loss": 0.7174, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.8907563025210084e-06, |
|
"loss": 0.7344, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.882352941176471e-06, |
|
"loss": 0.7472, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.873949579831933e-06, |
|
"loss": 0.7265, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.865546218487396e-06, |
|
"loss": 0.7508, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.857142857142858e-06, |
|
"loss": 0.7163, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.84873949579832e-06, |
|
"loss": 0.7336, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.840336134453781e-06, |
|
"loss": 0.7464, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.831932773109243e-06, |
|
"loss": 0.7261, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.8235294117647055e-06, |
|
"loss": 0.7222, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.815126050420168e-06, |
|
"loss": 0.7522, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.8067226890756302e-06, |
|
"loss": 0.7152, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.7983193277310924e-06, |
|
"loss": 0.7134, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.7899159663865545e-06, |
|
"loss": 0.7028, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.7815126050420167e-06, |
|
"loss": 0.7166, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.773109243697479e-06, |
|
"loss": 0.7329, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.7647058823529414e-06, |
|
"loss": 0.7305, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.7563025210084035e-06, |
|
"loss": 0.7561, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.7478991596638656e-06, |
|
"loss": 0.7281, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.7394957983193278e-06, |
|
"loss": 0.7117, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.73109243697479e-06, |
|
"loss": 0.7031, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.7226890756302525e-06, |
|
"loss": 0.7133, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.7142857142857146e-06, |
|
"loss": 0.7399, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.7058823529411767e-06, |
|
"loss": 0.7102, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.697478991596639e-06, |
|
"loss": 0.716, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.689075630252101e-06, |
|
"loss": 0.7448, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.6806722689075636e-06, |
|
"loss": 0.7139, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.6722689075630257e-06, |
|
"loss": 0.7244, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.663865546218488e-06, |
|
"loss": 0.731, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.655462184873949e-06, |
|
"loss": 0.7344, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.6470588235294117e-06, |
|
"loss": 0.7297, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.638655462184874e-06, |
|
"loss": 0.718, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.630252100840336e-06, |
|
"loss": 0.7262, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.621848739495798e-06, |
|
"loss": 0.7185, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.6134453781512602e-06, |
|
"loss": 0.7084, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.605042016806723e-06, |
|
"loss": 0.7152, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.596638655462185e-06, |
|
"loss": 0.6996, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.588235294117647e-06, |
|
"loss": 0.7184, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.5798319327731092e-06, |
|
"loss": 0.7, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.5714285714285714e-06, |
|
"loss": 0.7155, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.5630252100840335e-06, |
|
"loss": 0.7204, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.554621848739496e-06, |
|
"loss": 0.7248, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.546218487394958e-06, |
|
"loss": 0.7232, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.5378151260504203e-06, |
|
"loss": 0.7163, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.5294117647058825e-06, |
|
"loss": 0.7219, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.5210084033613446e-06, |
|
"loss": 0.7422, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.512605042016807e-06, |
|
"loss": 0.719, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.5042016806722693e-06, |
|
"loss": 0.7058, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.4957983193277314e-06, |
|
"loss": 0.7321, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.4873949579831936e-06, |
|
"loss": 0.7247, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.4789915966386557e-06, |
|
"loss": 0.721, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.470588235294118e-06, |
|
"loss": 0.7138, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.4621848739495796e-06, |
|
"loss": 0.7322, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.4537815126050417e-06, |
|
"loss": 0.6883, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.445378151260504e-06, |
|
"loss": 0.7016, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.4369747899159664e-06, |
|
"loss": 0.7124, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.4285714285714285e-06, |
|
"loss": 0.7302, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.4201680672268907e-06, |
|
"loss": 0.7583, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.411764705882353e-06, |
|
"loss": 0.727, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.403361344537815e-06, |
|
"loss": 0.7506, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.3949579831932775e-06, |
|
"loss": 0.7356, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.3865546218487396e-06, |
|
"loss": 0.7154, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.3781512605042018e-06, |
|
"loss": 0.7204, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.369747899159664e-06, |
|
"loss": 0.6537, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.361344537815126e-06, |
|
"loss": 0.6609, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.352941176470588e-06, |
|
"loss": 0.662, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.3445378151260508e-06, |
|
"loss": 0.6924, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.336134453781513e-06, |
|
"loss": 0.6623, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.327731092436975e-06, |
|
"loss": 0.6464, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.319327731092437e-06, |
|
"loss": 0.6102, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3109243697478993e-06, |
|
"loss": 0.6213, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.302521008403362e-06, |
|
"loss": 0.6724, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.294117647058824e-06, |
|
"loss": 0.6709, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.285714285714286e-06, |
|
"loss": 0.654, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.277310924369748e-06, |
|
"loss": 0.6542, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.26890756302521e-06, |
|
"loss": 0.6449, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.260504201680672e-06, |
|
"loss": 0.6568, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.2521008403361343e-06, |
|
"loss": 0.6495, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.2436974789915964e-06, |
|
"loss": 0.6644, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.2352941176470585e-06, |
|
"loss": 0.6548, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.226890756302521e-06, |
|
"loss": 0.6472, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.2184873949579832e-06, |
|
"loss": 0.6447, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.2100840336134454e-06, |
|
"loss": 0.6323, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.2016806722689075e-06, |
|
"loss": 0.6511, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.1932773109243696e-06, |
|
"loss": 0.6498, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.184873949579832e-06, |
|
"loss": 0.6358, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.1764705882352943e-06, |
|
"loss": 0.6599, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.1680672268907565e-06, |
|
"loss": 0.6596, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.1596638655462186e-06, |
|
"loss": 0.6301, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.1512605042016808e-06, |
|
"loss": 0.6251, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.142857142857143e-06, |
|
"loss": 0.6552, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.1344537815126055e-06, |
|
"loss": 0.6479, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.1260504201680676e-06, |
|
"loss": 0.6602, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.1176470588235297e-06, |
|
"loss": 0.6412, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.109243697478992e-06, |
|
"loss": 0.6397, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.100840336134454e-06, |
|
"loss": 0.6677, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.0924369747899157e-06, |
|
"loss": 0.6144, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.084033613445378e-06, |
|
"loss": 0.6347, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.07563025210084e-06, |
|
"loss": 0.6562, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.0672268907563026e-06, |
|
"loss": 0.6583, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.0588235294117647e-06, |
|
"loss": 0.6759, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.050420168067227e-06, |
|
"loss": 0.6663, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.042016806722689e-06, |
|
"loss": 0.6551, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.033613445378151e-06, |
|
"loss": 0.6738, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.0252100840336132e-06, |
|
"loss": 0.6469, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.016806722689076e-06, |
|
"loss": 0.6549, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.008403361344538e-06, |
|
"loss": 0.6404, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3e-06, |
|
"loss": 0.6372, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.9915966386554622e-06, |
|
"loss": 0.6541, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.9831932773109244e-06, |
|
"loss": 0.6356, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.974789915966387e-06, |
|
"loss": 0.6241, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.966386554621849e-06, |
|
"loss": 0.6622, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.957983193277311e-06, |
|
"loss": 0.6708, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.949579831932773e-06, |
|
"loss": 0.6605, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.941176470588235e-06, |
|
"loss": 0.6521, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.9327731092436976e-06, |
|
"loss": 0.6373, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.9243697478991597e-06, |
|
"loss": 0.6655, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.915966386554622e-06, |
|
"loss": 0.6313, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.907563025210084e-06, |
|
"loss": 0.6155, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.899159663865546e-06, |
|
"loss": 0.6526, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.8907563025210087e-06, |
|
"loss": 0.6653, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.882352941176471e-06, |
|
"loss": 0.6351, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.873949579831933e-06, |
|
"loss": 0.6724, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.865546218487395e-06, |
|
"loss": 0.6386, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.8571428571428573e-06, |
|
"loss": 0.6352, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.8487394957983194e-06, |
|
"loss": 0.6566, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.8403361344537815e-06, |
|
"loss": 0.6931, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.8319327731092437e-06, |
|
"loss": 0.6714, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.823529411764706e-06, |
|
"loss": 0.6605, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.815126050420168e-06, |
|
"loss": 0.6342, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 2.8067226890756305e-06, |
|
"loss": 0.6425, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 2.7983193277310926e-06, |
|
"loss": 0.6374, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 2.7899159663865548e-06, |
|
"loss": 0.6417, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 2.781512605042017e-06, |
|
"loss": 0.6414, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 2.773109243697479e-06, |
|
"loss": 0.6314, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 2.764705882352941e-06, |
|
"loss": 0.6606, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 2.7563025210084033e-06, |
|
"loss": 0.6748, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 2.7478991596638655e-06, |
|
"loss": 0.6099, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 2.7394957983193276e-06, |
|
"loss": 0.6424, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 2.7310924369747897e-06, |
|
"loss": 0.6311, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 2.7226890756302523e-06, |
|
"loss": 0.6463, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 2.7142857142857144e-06, |
|
"loss": 0.6723, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.7058823529411766e-06, |
|
"loss": 0.6463, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.6974789915966387e-06, |
|
"loss": 0.6764, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.689075630252101e-06, |
|
"loss": 0.6475, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.6806722689075634e-06, |
|
"loss": 0.6526, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.6722689075630255e-06, |
|
"loss": 0.6402, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.6638655462184873e-06, |
|
"loss": 0.6331, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.6554621848739494e-06, |
|
"loss": 0.6346, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.647058823529412e-06, |
|
"loss": 0.6306, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.638655462184874e-06, |
|
"loss": 0.6578, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.6302521008403362e-06, |
|
"loss": 0.6477, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.6218487394957984e-06, |
|
"loss": 0.6377, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.6134453781512605e-06, |
|
"loss": 0.6585, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.6050420168067226e-06, |
|
"loss": 0.6341, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.596638655462185e-06, |
|
"loss": 0.6534, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.5882352941176473e-06, |
|
"loss": 0.6521, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.5798319327731095e-06, |
|
"loss": 0.6457, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.571428571428571e-06, |
|
"loss": 0.6533, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.5630252100840338e-06, |
|
"loss": 0.655, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.554621848739496e-06, |
|
"loss": 0.6516, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.546218487394958e-06, |
|
"loss": 0.6452, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.53781512605042e-06, |
|
"loss": 0.6336, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.5294117647058823e-06, |
|
"loss": 0.6476, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.5210084033613444e-06, |
|
"loss": 0.6659, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.512605042016807e-06, |
|
"loss": 0.6405, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.504201680672269e-06, |
|
"loss": 0.6357, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.4957983193277313e-06, |
|
"loss": 0.6498, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.4873949579831934e-06, |
|
"loss": 0.6374, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.4789915966386555e-06, |
|
"loss": 0.6538, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.4705882352941177e-06, |
|
"loss": 0.6679, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.46218487394958e-06, |
|
"loss": 0.6247, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.453781512605042e-06, |
|
"loss": 0.6425, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.445378151260504e-06, |
|
"loss": 0.6356, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.4369747899159667e-06, |
|
"loss": 0.6557, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.428571428571429e-06, |
|
"loss": 0.6327, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.420168067226891e-06, |
|
"loss": 0.6267, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.411764705882353e-06, |
|
"loss": 0.6438, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.403361344537815e-06, |
|
"loss": 0.6407, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.3949579831932773e-06, |
|
"loss": 0.6389, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.3865546218487395e-06, |
|
"loss": 0.6279, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.3781512605042016e-06, |
|
"loss": 0.6104, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.3697478991596638e-06, |
|
"loss": 0.6451, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.361344537815126e-06, |
|
"loss": 0.6474, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.3529411764705885e-06, |
|
"loss": 0.6498, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.3445378151260506e-06, |
|
"loss": 0.6576, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.3361344537815127e-06, |
|
"loss": 0.6659, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.327731092436975e-06, |
|
"loss": 0.6441, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.319327731092437e-06, |
|
"loss": 0.645, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.310924369747899e-06, |
|
"loss": 0.6449, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.3025210084033617e-06, |
|
"loss": 0.6666, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.2941176470588234e-06, |
|
"loss": 0.6338, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.2857142857142856e-06, |
|
"loss": 0.6317, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.2773109243697477e-06, |
|
"loss": 0.6484, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.2689075630252102e-06, |
|
"loss": 0.6445, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.2605042016806724e-06, |
|
"loss": 0.6414, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.2521008403361345e-06, |
|
"loss": 0.6443, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.2436974789915967e-06, |
|
"loss": 0.6488, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.235294117647059e-06, |
|
"loss": 0.6479, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.2268907563025214e-06, |
|
"loss": 0.6078, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.2184873949579835e-06, |
|
"loss": 0.6432, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.2100840336134456e-06, |
|
"loss": 0.6512, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.2016806722689073e-06, |
|
"loss": 0.6522, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.1932773109243695e-06, |
|
"loss": 0.6406, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.184873949579832e-06, |
|
"loss": 0.6389, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.176470588235294e-06, |
|
"loss": 0.6268, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.1680672268907563e-06, |
|
"loss": 0.6258, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.1596638655462185e-06, |
|
"loss": 0.6555, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.1512605042016806e-06, |
|
"loss": 0.6401, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.142857142857143e-06, |
|
"loss": 0.6552, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.1344537815126053e-06, |
|
"loss": 0.6703, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.1260504201680674e-06, |
|
"loss": 0.6512, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.1176470588235296e-06, |
|
"loss": 0.6415, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.1092436974789913e-06, |
|
"loss": 0.6572, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.100840336134454e-06, |
|
"loss": 0.6622, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.092436974789916e-06, |
|
"loss": 0.6691, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.084033613445378e-06, |
|
"loss": 0.6422, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.0756302521008403e-06, |
|
"loss": 0.6557, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.0672268907563024e-06, |
|
"loss": 0.6606, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.058823529411765e-06, |
|
"loss": 0.6533, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.050420168067227e-06, |
|
"loss": 0.6407, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.0420168067226892e-06, |
|
"loss": 0.6524, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.0336134453781514e-06, |
|
"loss": 0.645, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.0252100840336135e-06, |
|
"loss": 0.6445, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.0168067226890756e-06, |
|
"loss": 0.6602, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.0084033613445378e-06, |
|
"loss": 0.6412, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2e-06, |
|
"loss": 0.6525, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.991596638655462e-06, |
|
"loss": 0.6369, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.983193277310924e-06, |
|
"loss": 0.6349, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.9747899159663867e-06, |
|
"loss": 0.6658, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.966386554621849e-06, |
|
"loss": 0.6326, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.957983193277311e-06, |
|
"loss": 0.6361, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.949579831932773e-06, |
|
"loss": 0.6565, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.9411764705882353e-06, |
|
"loss": 0.6351, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.932773109243698e-06, |
|
"loss": 0.6596, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.92436974789916e-06, |
|
"loss": 0.6425, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.9159663865546217e-06, |
|
"loss": 0.647, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.907563025210084e-06, |
|
"loss": 0.653, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.8991596638655462e-06, |
|
"loss": 0.663, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.8907563025210083e-06, |
|
"loss": 0.6504, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.8823529411764707e-06, |
|
"loss": 0.6495, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.8739495798319328e-06, |
|
"loss": 0.6636, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.865546218487395e-06, |
|
"loss": 0.6387, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.8571428571428573e-06, |
|
"loss": 0.6671, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.8487394957983194e-06, |
|
"loss": 0.6744, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.8403361344537818e-06, |
|
"loss": 0.6377, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.831932773109244e-06, |
|
"loss": 0.66, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.8235294117647058e-06, |
|
"loss": 0.6387, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.815126050420168e-06, |
|
"loss": 0.6523, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.8067226890756301e-06, |
|
"loss": 0.6298, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.7983193277310925e-06, |
|
"loss": 0.635, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.7899159663865546e-06, |
|
"loss": 0.6395, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.7815126050420167e-06, |
|
"loss": 0.6644, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.773109243697479e-06, |
|
"loss": 0.6434, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.7647058823529412e-06, |
|
"loss": 0.6566, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.7563025210084036e-06, |
|
"loss": 0.6506, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.7478991596638657e-06, |
|
"loss": 0.6525, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.7394957983193279e-06, |
|
"loss": 0.6327, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.7310924369747898e-06, |
|
"loss": 0.6519, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.722689075630252e-06, |
|
"loss": 0.6306, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.7142857142857143e-06, |
|
"loss": 0.639, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.7058823529411764e-06, |
|
"loss": 0.6392, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.6974789915966388e-06, |
|
"loss": 0.6482, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.6890756302521009e-06, |
|
"loss": 0.6739, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.680672268907563e-06, |
|
"loss": 0.6375, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.6722689075630254e-06, |
|
"loss": 0.6422, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.6638655462184875e-06, |
|
"loss": 0.6439, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.6554621848739497e-06, |
|
"loss": 0.6437, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.647058823529412e-06, |
|
"loss": 0.6452, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.638655462184874e-06, |
|
"loss": 0.632, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.630252100840336e-06, |
|
"loss": 0.6265, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.6218487394957982e-06, |
|
"loss": 0.6403, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.6134453781512606e-06, |
|
"loss": 0.6337, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.6050420168067227e-06, |
|
"loss": 0.6491, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.5966386554621848e-06, |
|
"loss": 0.6457, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.5882352941176472e-06, |
|
"loss": 0.6451, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.5798319327731093e-06, |
|
"loss": 0.6409, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.5714285714285714e-06, |
|
"loss": 0.6372, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.5630252100840338e-06, |
|
"loss": 0.6497, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.554621848739496e-06, |
|
"loss": 0.652, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.5462184873949579e-06, |
|
"loss": 0.6405, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.53781512605042e-06, |
|
"loss": 0.6295, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.5294117647058823e-06, |
|
"loss": 0.673, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.5210084033613445e-06, |
|
"loss": 0.6458, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.5126050420168066e-06, |
|
"loss": 0.6345, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.504201680672269e-06, |
|
"loss": 0.6352, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.4957983193277311e-06, |
|
"loss": 0.6429, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.4873949579831935e-06, |
|
"loss": 0.6468, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.4789915966386556e-06, |
|
"loss": 0.6425, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.4705882352941175e-06, |
|
"loss": 0.6462, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.4621848739495799e-06, |
|
"loss": 0.6341, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.453781512605042e-06, |
|
"loss": 0.63, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.4453781512605044e-06, |
|
"loss": 0.6397, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.4369747899159665e-06, |
|
"loss": 0.655, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.4285714285714286e-06, |
|
"loss": 0.6586, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.4201680672268908e-06, |
|
"loss": 0.6419, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.411764705882353e-06, |
|
"loss": 0.6472, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.4033613445378153e-06, |
|
"loss": 0.6337, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.3949579831932774e-06, |
|
"loss": 0.648, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.3865546218487395e-06, |
|
"loss": 0.6655, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.3781512605042017e-06, |
|
"loss": 0.6463, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.3697478991596638e-06, |
|
"loss": 0.656, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.3613445378151261e-06, |
|
"loss": 0.6236, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.3529411764705883e-06, |
|
"loss": 0.6536, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.3445378151260504e-06, |
|
"loss": 0.6288, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.3361344537815128e-06, |
|
"loss": 0.636, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.3277310924369747e-06, |
|
"loss": 0.6292, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.319327731092437e-06, |
|
"loss": 0.6474, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.3109243697478992e-06, |
|
"loss": 0.6468, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.3025210084033613e-06, |
|
"loss": 0.6522, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.2941176470588237e-06, |
|
"loss": 0.6227, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.2857142857142856e-06, |
|
"loss": 0.6327, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.277310924369748e-06, |
|
"loss": 0.6501, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.26890756302521e-06, |
|
"loss": 0.6375, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.2605042016806722e-06, |
|
"loss": 0.6385, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.2521008403361346e-06, |
|
"loss": 0.6374, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.2436974789915967e-06, |
|
"loss": 0.6332, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.2352941176470588e-06, |
|
"loss": 0.635, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.226890756302521e-06, |
|
"loss": 0.6404, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.2184873949579833e-06, |
|
"loss": 0.6256, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.2100840336134455e-06, |
|
"loss": 0.6076, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.2016806722689076e-06, |
|
"loss": 0.6469, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.1932773109243697e-06, |
|
"loss": 0.6477, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.1848739495798319e-06, |
|
"loss": 0.6408, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.1764705882352942e-06, |
|
"loss": 0.6448, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.1680672268907564e-06, |
|
"loss": 0.6288, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.1596638655462185e-06, |
|
"loss": 0.6398, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.1512605042016808e-06, |
|
"loss": 0.6497, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.1428571428571428e-06, |
|
"loss": 0.6583, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.1344537815126051e-06, |
|
"loss": 0.6346, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.1260504201680673e-06, |
|
"loss": 0.6394, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.1176470588235294e-06, |
|
"loss": 0.6409, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.1092436974789917e-06, |
|
"loss": 0.647, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.1008403361344537e-06, |
|
"loss": 0.6454, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.092436974789916e-06, |
|
"loss": 0.6292, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.0840336134453782e-06, |
|
"loss": 0.6492, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.0756302521008403e-06, |
|
"loss": 0.6405, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.0672268907563026e-06, |
|
"loss": 0.655, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.0588235294117648e-06, |
|
"loss": 0.6375, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.050420168067227e-06, |
|
"loss": 0.6613, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.042016806722689e-06, |
|
"loss": 0.6513, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.0336134453781512e-06, |
|
"loss": 0.6345, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.0252100840336135e-06, |
|
"loss": 0.6273, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.0168067226890757e-06, |
|
"loss": 0.6186, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.0084033613445378e-06, |
|
"loss": 0.6356, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1e-06, |
|
"loss": 0.6361, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.91596638655462e-07, |
|
"loss": 0.6363, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.831932773109244e-07, |
|
"loss": 0.6368, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.747899159663866e-07, |
|
"loss": 0.6479, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.66386554621849e-07, |
|
"loss": 0.6361, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.579831932773109e-07, |
|
"loss": 0.6435, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.495798319327731e-07, |
|
"loss": 0.621, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.411764705882353e-07, |
|
"loss": 0.6486, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.327731092436975e-07, |
|
"loss": 0.6738, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.243697478991597e-07, |
|
"loss": 0.6344, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.15966386554622e-07, |
|
"loss": 0.6547, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.07563025210084e-07, |
|
"loss": 0.616, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.991596638655462e-07, |
|
"loss": 0.6652, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.907563025210084e-07, |
|
"loss": 0.6593, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.823529411764706e-07, |
|
"loss": 0.6381, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.739495798319329e-07, |
|
"loss": 0.6556, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.655462184873949e-07, |
|
"loss": 0.6465, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.571428571428571e-07, |
|
"loss": 0.6563, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.487394957983194e-07, |
|
"loss": 0.6384, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.403361344537815e-07, |
|
"loss": 0.629, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.319327731092438e-07, |
|
"loss": 0.6367, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.23529411764706e-07, |
|
"loss": 0.6457, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.15126050420168e-07, |
|
"loss": 0.6242, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.067226890756303e-07, |
|
"loss": 0.6495, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.983193277310924e-07, |
|
"loss": 0.6441, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.899159663865547e-07, |
|
"loss": 0.6172, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.815126050420169e-07, |
|
"loss": 0.6385, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.731092436974789e-07, |
|
"loss": 0.6298, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.647058823529412e-07, |
|
"loss": 0.6435, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.563025210084033e-07, |
|
"loss": 0.6407, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.478991596638656e-07, |
|
"loss": 0.6434, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.394957983193278e-07, |
|
"loss": 0.639, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.310924369747899e-07, |
|
"loss": 0.6611, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.226890756302522e-07, |
|
"loss": 0.6266, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.142857142857143e-07, |
|
"loss": 0.6462, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.058823529411765e-07, |
|
"loss": 0.6476, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 6.974789915966387e-07, |
|
"loss": 0.654, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 6.890756302521008e-07, |
|
"loss": 0.6634, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 6.806722689075631e-07, |
|
"loss": 0.651, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 6.722689075630252e-07, |
|
"loss": 0.6557, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 6.638655462184873e-07, |
|
"loss": 0.64, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 6.554621848739496e-07, |
|
"loss": 0.6361, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 6.470588235294118e-07, |
|
"loss": 0.6434, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 6.38655462184874e-07, |
|
"loss": 0.6376, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 6.302521008403361e-07, |
|
"loss": 0.6415, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 6.218487394957984e-07, |
|
"loss": 0.6419, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 6.134453781512605e-07, |
|
"loss": 0.654, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 6.050420168067227e-07, |
|
"loss": 0.6393, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 5.966386554621849e-07, |
|
"loss": 0.6239, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 5.882352941176471e-07, |
|
"loss": 0.6343, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 5.798319327731093e-07, |
|
"loss": 0.6712, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 5.714285714285714e-07, |
|
"loss": 0.636, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 5.630252100840336e-07, |
|
"loss": 0.6427, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 5.546218487394959e-07, |
|
"loss": 0.6402, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 5.46218487394958e-07, |
|
"loss": 0.642, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 5.378151260504201e-07, |
|
"loss": 0.6279, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 5.294117647058824e-07, |
|
"loss": 0.6566, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 5.210084033613445e-07, |
|
"loss": 0.6504, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 5.126050420168068e-07, |
|
"loss": 0.6647, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 5.042016806722689e-07, |
|
"loss": 0.6461, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.95798319327731e-07, |
|
"loss": 0.6468, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.873949579831933e-07, |
|
"loss": 0.6488, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.789915966386554e-07, |
|
"loss": 0.6778, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.7058823529411767e-07, |
|
"loss": 0.6471, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.6218487394957986e-07, |
|
"loss": 0.6335, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.53781512605042e-07, |
|
"loss": 0.6581, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.453781512605042e-07, |
|
"loss": 0.6384, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.3697478991596643e-07, |
|
"loss": 0.6279, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.2857142857142857e-07, |
|
"loss": 0.6318, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.2016806722689076e-07, |
|
"loss": 0.6521, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.11764705882353e-07, |
|
"loss": 0.64, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.0336134453781514e-07, |
|
"loss": 0.6286, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.9495798319327733e-07, |
|
"loss": 0.6287, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.8655462184873946e-07, |
|
"loss": 0.6286, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.7815126050420166e-07, |
|
"loss": 0.6454, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.697478991596639e-07, |
|
"loss": 0.6305, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.613445378151261e-07, |
|
"loss": 0.6136, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.529411764705882e-07, |
|
"loss": 0.6458, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.445378151260504e-07, |
|
"loss": 0.6486, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.361344537815126e-07, |
|
"loss": 0.6313, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.277310924369748e-07, |
|
"loss": 0.6246, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.19327731092437e-07, |
|
"loss": 0.6345, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.109243697478992e-07, |
|
"loss": 0.6417, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.0252100840336137e-07, |
|
"loss": 0.635, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.9411764705882356e-07, |
|
"loss": 0.6473, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.857142857142857e-07, |
|
"loss": 0.6351, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.7731092436974794e-07, |
|
"loss": 0.6268, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.689075630252101e-07, |
|
"loss": 0.6251, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.6050420168067226e-07, |
|
"loss": 0.6354, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.5210084033613445e-07, |
|
"loss": 0.6434, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.4369747899159664e-07, |
|
"loss": 0.6279, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.3529411764705883e-07, |
|
"loss": 0.627, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.26890756302521e-07, |
|
"loss": 0.6324, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.1848739495798321e-07, |
|
"loss": 0.6511, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 2.1008403361344538e-07, |
|
"loss": 0.6578, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 2.0168067226890757e-07, |
|
"loss": 0.65, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.9327731092436973e-07, |
|
"loss": 0.6523, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.8487394957983195e-07, |
|
"loss": 0.6384, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.764705882352941e-07, |
|
"loss": 0.636, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.680672268907563e-07, |
|
"loss": 0.6417, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.596638655462185e-07, |
|
"loss": 0.6311, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.5126050420168068e-07, |
|
"loss": 0.6418, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.4285714285714285e-07, |
|
"loss": 0.6569, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.3445378151260504e-07, |
|
"loss": 0.6397, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.2605042016806723e-07, |
|
"loss": 0.6338, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.1764705882352942e-07, |
|
"loss": 0.6209, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.0924369747899161e-07, |
|
"loss": 0.6602, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.0084033613445378e-07, |
|
"loss": 0.6634, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 9.243697478991597e-08, |
|
"loss": 0.6433, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 8.403361344537815e-08, |
|
"loss": 0.6666, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 7.563025210084034e-08, |
|
"loss": 0.6515, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.722689075630252e-08, |
|
"loss": 0.6698, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.882352941176471e-08, |
|
"loss": 0.6463, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.042016806722689e-08, |
|
"loss": 0.6522, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 4.2016806722689076e-08, |
|
"loss": 0.6149, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.361344537815126e-08, |
|
"loss": 0.6234, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 2.5210084033613446e-08, |
|
"loss": 0.642, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.680672268907563e-08, |
|
"loss": 0.6412, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 8.403361344537815e-09, |
|
"loss": 0.6388, |
|
"step": 814 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 814, |
|
"num_train_epochs": 2, |
|
"save_steps": 407, |
|
"total_flos": 2.275967145406261e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|