{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9880478087649402,
  "global_step": 375,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 1.6666666666666667e-06, "loss": 1.5044, "step": 1 },
    { "epoch": 0.02, "learning_rate": 3.3333333333333333e-06, "loss": 1.4514, "step": 2 },
    { "epoch": 0.02, "learning_rate": 5e-06, "loss": 1.4498, "step": 3 },
    { "epoch": 0.03, "learning_rate": 6.666666666666667e-06, "loss": 1.4186, "step": 4 },
    { "epoch": 0.04, "learning_rate": 8.333333333333334e-06, "loss": 1.3586, "step": 5 },
    { "epoch": 0.05, "learning_rate": 1e-05, "loss": 1.2792, "step": 6 },
    { "epoch": 0.06, "learning_rate": 1.1666666666666668e-05, "loss": 1.2723, "step": 7 },
    { "epoch": 0.06, "learning_rate": 1.3333333333333333e-05, "loss": 1.2244, "step": 8 },
    { "epoch": 0.07, "learning_rate": 1.5000000000000002e-05, "loss": 1.1807, "step": 9 },
    { "epoch": 0.08, "learning_rate": 1.6666666666666667e-05, "loss": 1.1877, "step": 10 },
    { "epoch": 0.09, "learning_rate": 1.8333333333333333e-05, "loss": 1.163, "step": 11 },
    { "epoch": 0.1, "learning_rate": 2e-05, "loss": 1.1489, "step": 12 },
    { "epoch": 0.1, "learning_rate": 1.9999625498303936e-05, "loss": 1.1348, "step": 13 },
    { "epoch": 0.11, "learning_rate": 1.999850202126604e-05, "loss": 1.1368, "step": 14 },
    { "epoch": 0.12, "learning_rate": 1.9996629653035128e-05, "loss": 1.1382, "step": 15 },
    { "epoch": 0.13, "learning_rate": 1.999400853385221e-05, "loss": 1.1297, "step": 16 },
    { "epoch": 0.14, "learning_rate": 1.9990638860040007e-05, "loss": 1.1365, "step": 17 },
    { "epoch": 0.14, "learning_rate": 1.9986520883988233e-05, "loss": 1.1292, "step": 18 },
    { "epoch": 0.15, "learning_rate": 1.9981654914134684e-05, "loss": 1.1278, "step": 19 },
    { "epoch": 0.16, "learning_rate": 1.9976041314942156e-05, "loss": 1.106, "step": 20 },
    { "epoch": 0.17, "learning_rate": 1.9969680506871138e-05, "loss": 1.1053, "step": 21 },
    { "epoch": 0.18, "learning_rate": 1.99625729663483e-05, "loss": 1.11, "step": 22 },
    { "epoch": 0.18, "learning_rate": 1.9954719225730847e-05, "loss": 1.1139, "step": 23 },
    { "epoch": 0.19, "learning_rate": 1.9946119873266615e-05, "loss": 1.1226, "step": 24 },
    { "epoch": 0.2, "learning_rate": 1.9936775553050017e-05, "loss": 1.046, "step": 25 },
    { "epoch": 0.21, "learning_rate": 1.9926686964973813e-05, "loss": 1.1061, "step": 26 },
    { "epoch": 0.22, "learning_rate": 1.9915854864676665e-05, "loss": 1.0507, "step": 27 },
    { "epoch": 0.22, "learning_rate": 1.9904280063486563e-05, "loss": 1.0609, "step": 28 },
    { "epoch": 0.23, "learning_rate": 1.9891963428360043e-05, "loss": 1.0782, "step": 29 },
    { "epoch": 0.24, "learning_rate": 1.9878905881817254e-05, "loss": 1.1025, "step": 30 },
    { "epoch": 0.25, "learning_rate": 1.9865108401872856e-05, "loss": 1.0876, "step": 31 },
    { "epoch": 0.25, "learning_rate": 1.9850572021962788e-05, "loss": 1.1006, "step": 32 },
    { "epoch": 0.26, "learning_rate": 1.9835297830866827e-05, "loss": 1.0444, "step": 33 },
    { "epoch": 0.27, "learning_rate": 1.9819286972627066e-05, "loss": 1.0563, "step": 34 },
    { "epoch": 0.28, "learning_rate": 1.980254064646223e-05, "loss": 1.0656, "step": 35 },
    { "epoch": 0.29, "learning_rate": 1.9785060106677818e-05, "loss": 1.0823, "step": 36 },
    { "epoch": 0.29, "learning_rate": 1.976684666257219e-05, "loss": 1.0634, "step": 37 },
    { "epoch": 0.3, "learning_rate": 1.9747901678338496e-05, "loss": 1.0581, "step": 38 },
    { "epoch": 0.31, "learning_rate": 1.9728226572962474e-05, "loss": 1.0588, "step": 39 },
    { "epoch": 0.32, "learning_rate": 1.9707822820116193e-05, "loss": 1.0704, "step": 40 },
    { "epoch": 0.33, "learning_rate": 1.9686691948047665e-05, "loss": 1.0822, "step": 41 },
    { "epoch": 0.33, "learning_rate": 1.966483553946637e-05, "loss": 1.0621, "step": 42 },
    { "epoch": 0.34, "learning_rate": 1.964225523142473e-05, "loss": 1.0648, "step": 43 },
    { "epoch": 0.35, "learning_rate": 1.9618952715195476e-05, "loss": 1.0837, "step": 44 },
    { "epoch": 0.36, "learning_rate": 1.9594929736144978e-05, "loss": 1.0543, "step": 45 },
    { "epoch": 0.37, "learning_rate": 1.9570188093602512e-05, "loss": 1.0535, "step": 46 },
    { "epoch": 0.37, "learning_rate": 1.95447296407255e-05, "loss": 1.0533, "step": 47 },
    { "epoch": 0.38, "learning_rate": 1.9518556284360696e-05, "loss": 1.0191, "step": 48 },
    { "epoch": 0.39, "learning_rate": 1.9491669984901377e-05, "loss": 1.0395, "step": 49 },
    { "epoch": 0.4, "learning_rate": 1.9464072756140487e-05, "loss": 1.0173, "step": 50 },
    { "epoch": 0.41, "learning_rate": 1.9435766665119823e-05, "loss": 1.048, "step": 51 },
    { "epoch": 0.41, "learning_rate": 1.9406753831975202e-05, "loss": 1.0496, "step": 52 },
    { "epoch": 0.42, "learning_rate": 1.9377036429777673e-05, "loss": 1.0616, "step": 53 },
    { "epoch": 0.43, "learning_rate": 1.934661668437073e-05, "loss": 1.0504, "step": 54 },
    { "epoch": 0.44, "learning_rate": 1.9315496874203637e-05, "loss": 1.0399, "step": 55 },
    { "epoch": 0.45, "learning_rate": 1.9283679330160726e-05, "loss": 1.0416, "step": 56 },
    { "epoch": 0.45, "learning_rate": 1.9251166435386837e-05, "loss": 1.0175, "step": 57 },
    { "epoch": 0.46, "learning_rate": 1.921796062510882e-05, "loss": 1.0285, "step": 58 },
    { "epoch": 0.47, "learning_rate": 1.9184064386453127e-05, "loss": 1.0312, "step": 59 },
    { "epoch": 0.48, "learning_rate": 1.9149480258259535e-05, "loss": 1.0463, "step": 60 },
    { "epoch": 0.49, "learning_rate": 1.911421083089097e-05, "loss": 1.0521, "step": 61 },
    { "epoch": 0.49, "learning_rate": 1.907825874603951e-05, "loss": 1.0276, "step": 62 },
    { "epoch": 0.5, "learning_rate": 1.9041626696528503e-05, "loss": 1.0256, "step": 63 },
    { "epoch": 0.51, "learning_rate": 1.9004317426110888e-05, "loss": 1.0403, "step": 64 },
    { "epoch": 0.52, "learning_rate": 1.8966333729263674e-05, "loss": 1.0687, "step": 65 },
    { "epoch": 0.53, "learning_rate": 1.892767845097864e-05, "loss": 1.0394, "step": 66 },
    { "epoch": 0.53, "learning_rate": 1.8888354486549238e-05, "loss": 0.9965, "step": 67 },
    { "epoch": 0.54, "learning_rate": 1.8848364781353744e-05, "loss": 1.0397, "step": 68 },
    { "epoch": 0.55, "learning_rate": 1.8807712330634645e-05, "loss": 1.027, "step": 69 },
    { "epoch": 0.56, "learning_rate": 1.8766400179274287e-05, "loss": 1.0201, "step": 70 },
    { "epoch": 0.57, "learning_rate": 1.8724431421566822e-05, "loss": 1.0081, "step": 71 },
    { "epoch": 0.57, "learning_rate": 1.868180920098644e-05, "loss": 0.9956, "step": 72 },
    { "epoch": 0.58, "learning_rate": 1.8638536709951916e-05, "loss": 1.0127, "step": 73 },
    { "epoch": 0.59, "learning_rate": 1.8594617189587515e-05, "loss": 0.9887, "step": 74 },
    { "epoch": 0.6, "learning_rate": 1.8550053929480202e-05, "loss": 1.0266, "step": 75 },
    { "epoch": 0.61, "learning_rate": 1.8504850267433278e-05, "loss": 1.0102, "step": 76 },
    { "epoch": 0.61, "learning_rate": 1.8459009589216364e-05, "loss": 0.9728, "step": 77 },
    { "epoch": 0.62, "learning_rate": 1.8412535328311813e-05, "loss": 0.9944, "step": 78 },
    { "epoch": 0.63, "learning_rate": 1.8365430965657527e-05, "loss": 1.0591, "step": 79 },
    { "epoch": 0.64, "learning_rate": 1.8317700029386245e-05, "loss": 1.0032, "step": 80 },
    { "epoch": 0.65, "learning_rate": 1.826934609456129e-05, "loss": 1.0094, "step": 81 },
    { "epoch": 0.65, "learning_rate": 1.8220372782908778e-05, "loss": 1.0118, "step": 82 },
    { "epoch": 0.66, "learning_rate": 1.8170783762546363e-05, "loss": 1.0254, "step": 83 },
    { "epoch": 0.67, "learning_rate": 1.8120582747708503e-05, "loss": 1.0108, "step": 84 },
    { "epoch": 0.68, "learning_rate": 1.8069773498468224e-05, "loss": 0.9887, "step": 85 },
    { "epoch": 0.69, "learning_rate": 1.8018359820455535e-05, "loss": 1.0241, "step": 86 },
    { "epoch": 0.69, "learning_rate": 1.796634556457236e-05, "loss": 1.0179, "step": 87 },
    { "epoch": 0.7, "learning_rate": 1.791373462670411e-05, "loss": 1.0357, "step": 88 },
    { "epoch": 0.71, "learning_rate": 1.7860530947427878e-05, "loss": 0.9963, "step": 89 },
    { "epoch": 0.72, "learning_rate": 1.780673851171728e-05, "loss": 1.0084, "step": 90 },
    { "epoch": 0.73, "learning_rate": 1.7752361348644012e-05, "loss": 1.0178, "step": 91 },
    { "epoch": 0.73, "learning_rate": 1.769740353107602e-05, "loss": 0.9936, "step": 92 },
    { "epoch": 0.74, "learning_rate": 1.7641869175372493e-05, "loss": 0.9838, "step": 93 },
    { "epoch": 0.75, "learning_rate": 1.7585762441075504e-05, "loss": 1.0163, "step": 94 },
    { "epoch": 0.76, "learning_rate": 1.752908753059849e-05, "loss": 0.9954, "step": 95 },
    { "epoch": 0.76, "learning_rate": 1.7471848688911465e-05, "loss": 1.0138, "step": 96 },
    { "epoch": 0.77, "learning_rate": 1.7414050203223092e-05, "loss": 0.997, "step": 97 },
    { "epoch": 0.78, "learning_rate": 1.735569640265955e-05, "loss": 1.011, "step": 98 },
    { "epoch": 0.79, "learning_rate": 1.72967916579403e-05, "loss": 0.9625, "step": 99 },
    { "epoch": 0.8, "learning_rate": 1.72373403810507e-05, "loss": 1.0054, "step": 100 },
    { "epoch": 0.8, "learning_rate": 1.7177347024911562e-05, "loss": 1.0169, "step": 101 },
    { "epoch": 0.81, "learning_rate": 1.7116816083045603e-05, "loss": 0.9911, "step": 102 },
    { "epoch": 0.82, "learning_rate": 1.7055752089240907e-05, "loss": 1.0246, "step": 103 },
    { "epoch": 0.83, "learning_rate": 1.6994159617211318e-05, "loss": 1.0024, "step": 104 },
    { "epoch": 0.84, "learning_rate": 1.6932043280253892e-05, "loss": 0.9886, "step": 105 },
    { "epoch": 0.84, "learning_rate": 1.686940773090333e-05, "loss": 0.9775, "step": 106 },
    { "epoch": 0.85, "learning_rate": 1.6806257660583534e-05, "loss": 0.9755, "step": 107 },
    { "epoch": 0.86, "learning_rate": 1.6742597799256182e-05, "loss": 1.0068, "step": 108 },
    { "epoch": 0.87, "learning_rate": 1.6678432915066488e-05, "loss": 0.978, "step": 109 },
    { "epoch": 0.88, "learning_rate": 1.6613767813986045e-05, "loss": 1.002, "step": 110 },
    { "epoch": 0.88, "learning_rate": 1.6548607339452853e-05, "loss": 0.9859, "step": 111 },
    { "epoch": 0.89, "learning_rate": 1.648295637200856e-05, "loss": 0.9753, "step": 112 },
    { "epoch": 0.9, "learning_rate": 1.64168198289329e-05, "loss": 0.9771, "step": 113 },
    { "epoch": 0.91, "learning_rate": 1.6350202663875385e-05, "loss": 0.9843, "step": 114 },
    { "epoch": 0.92, "learning_rate": 1.628310986648427e-05, "loss": 0.9507, "step": 115 },
    { "epoch": 0.92, "learning_rate": 1.621554646203284e-05, "loss": 0.9875, "step": 116 },
    { "epoch": 0.93, "learning_rate": 1.614751751104301e-05, "loss": 0.9724, "step": 117 },
    { "epoch": 0.94, "learning_rate": 1.607902810890628e-05, "loss": 0.9572, "step": 118 },
    { "epoch": 0.95, "learning_rate": 1.601008338550211e-05, "loss": 0.9905, "step": 119 },
    { "epoch": 0.96, "learning_rate": 1.5940688504813664e-05, "loss": 0.9758, "step": 120 },
    { "epoch": 0.96, "learning_rate": 1.5870848664541046e-05, "loss": 0.9927, "step": 121 },
    { "epoch": 0.97, "learning_rate": 1.5800569095711983e-05, "loss": 0.9696, "step": 122 },
    { "epoch": 0.98, "learning_rate": 1.5729855062290024e-05, "loss": 0.9775, "step": 123 },
    { "epoch": 0.99, "learning_rate": 1.565871186078025e-05, "loss": 0.9904, "step": 124 },
    { "epoch": 1.0, "learning_rate": 1.55871448198326e-05, "loss": 0.9796, "step": 125 },
    { "epoch": 1.0, "learning_rate": 1.551515929984271e-05, "loss": 0.8877, "step": 126 },
    { "epoch": 1.01, "learning_rate": 1.5442760692550443e-05, "loss": 0.7575, "step": 127 },
    { "epoch": 1.02, "learning_rate": 1.5369954420636048e-05, "loss": 0.7759, "step": 128 },
    { "epoch": 1.03, "learning_rate": 1.529674593731399e-05, "loss": 0.7747, "step": 129 },
    { "epoch": 1.04, "learning_rate": 1.5223140725924494e-05, "loss": 0.7596, "step": 130 },
    { "epoch": 1.04, "learning_rate": 1.5149144299522874e-05, "loss": 0.7461, "step": 131 },
    { "epoch": 1.05, "learning_rate": 1.5074762200466557e-05, "loss": 0.7336, "step": 132 },
    { "epoch": 1.06, "learning_rate": 1.5000000000000002e-05, "loss": 0.7054, "step": 133 },
    { "epoch": 1.07, "learning_rate": 1.4924863297837378e-05, "loss": 0.7358, "step": 134 },
    { "epoch": 1.08, "learning_rate": 1.4849357721743169e-05, "loss": 0.7282, "step": 135 },
    { "epoch": 1.08, "learning_rate": 1.4773488927110633e-05, "loss": 0.7195, "step": 136 },
    { "epoch": 1.09, "learning_rate": 1.4697262596538227e-05, "loss": 0.7279, "step": 137 },
    { "epoch": 1.1, "learning_rate": 1.4620684439403962e-05, "loss": 0.7313, "step": 138 },
    { "epoch": 1.11, "learning_rate": 1.454376019143779e-05, "loss": 0.7415, "step": 139 },
    { "epoch": 1.12, "learning_rate": 1.4466495614291977e-05, "loss": 0.7305, "step": 140 },
    { "epoch": 1.12, "learning_rate": 1.438889649510956e-05, "loss": 0.7545, "step": 141 },
    { "epoch": 1.13, "learning_rate": 1.4310968646090884e-05, "loss": 0.7307, "step": 142 },
    { "epoch": 1.14, "learning_rate": 1.423271790405828e-05, "loss": 0.7327, "step": 143 },
    { "epoch": 1.15, "learning_rate": 1.4154150130018867e-05, "loss": 0.7417, "step": 144 },
    { "epoch": 1.16, "learning_rate": 1.4075271208725572e-05, "loss": 0.7491, "step": 145 },
    { "epoch": 1.16, "learning_rate": 1.3996087048236357e-05, "loss": 0.7422, "step": 146 },
    { "epoch": 1.17, "learning_rate": 1.3916603579471705e-05, "loss": 0.7551, "step": 147 },
    { "epoch": 1.18, "learning_rate": 1.3836826755770386e-05, "loss": 0.7356, "step": 148 },
    { "epoch": 1.19, "learning_rate": 1.3756762552443555e-05, "loss": 0.7139, "step": 149 },
    { "epoch": 1.2, "learning_rate": 1.3676416966327201e-05, "loss": 0.7411, "step": 150 },
    { "epoch": 1.2, "learning_rate": 1.3595796015332986e-05, "loss": 0.7511, "step": 151 },
    { "epoch": 1.21, "learning_rate": 1.3514905737997474e-05, "loss": 0.747, "step": 152 },
    { "epoch": 1.22, "learning_rate": 1.3433752193029888e-05, "loss": 0.7302, "step": 153 },
    { "epoch": 1.23, "learning_rate": 1.3352341458858264e-05, "loss": 0.7489, "step": 154 },
    { "epoch": 1.24, "learning_rate": 1.3270679633174219e-05, "loss": 0.7534, "step": 155 },
    { "epoch": 1.24, "learning_rate": 1.318877283247619e-05, "loss": 0.7423, "step": 156 },
    { "epoch": 1.25, "learning_rate": 1.3106627191611333e-05, "loss": 0.7141, "step": 157 },
    { "epoch": 1.26, "learning_rate": 1.3024248863316012e-05, "loss": 0.7443, "step": 158 },
    { "epoch": 1.27, "learning_rate": 1.2941644017754964e-05, "loss": 0.7383, "step": 159 },
    { "epoch": 1.27, "learning_rate": 1.2858818842059145e-05, "loss": 0.7444, "step": 160 },
    { "epoch": 1.28, "learning_rate": 1.2775779539862305e-05, "loss": 0.7489, "step": 161 },
    { "epoch": 1.29, "learning_rate": 1.2692532330836346e-05, "loss": 0.7266, "step": 162 },
    { "epoch": 1.3, "learning_rate": 1.2609083450225468e-05, "loss": 0.7159, "step": 163 },
    { "epoch": 1.31, "learning_rate": 1.2525439148379127e-05, "loss": 0.7362, "step": 164 },
    { "epoch": 1.31, "learning_rate": 1.2441605690283915e-05, "loss": 0.7174, "step": 165 },
    { "epoch": 1.32, "learning_rate": 1.2357589355094275e-05, "loss": 0.7137, "step": 166 },
    { "epoch": 1.33, "learning_rate": 1.2273396435662212e-05, "loss": 0.723, "step": 167 },
    { "epoch": 1.34, "learning_rate": 1.218903323806595e-05, "loss": 0.7185, "step": 168 },
    { "epoch": 1.35, "learning_rate": 1.2104506081137608e-05, "loss": 0.7306, "step": 169 },
    { "epoch": 1.35, "learning_rate": 1.2019821295989913e-05, "loss": 0.7213, "step": 170 },
    { "epoch": 1.36, "learning_rate": 1.1934985225541998e-05, "loss": 0.7159, "step": 171 },
    { "epoch": 1.37, "learning_rate": 1.1850004224044315e-05, "loss": 0.7298, "step": 172 },
    { "epoch": 1.38, "learning_rate": 1.1764884656602711e-05, "loss": 0.7324, "step": 173 },
    { "epoch": 1.39, "learning_rate": 1.1679632898701649e-05, "loss": 0.7354, "step": 174 },
    { "epoch": 1.39, "learning_rate": 1.1594255335726725e-05, "loss": 0.7195, "step": 175 },
    { "epoch": 1.4, "learning_rate": 1.1508758362486358e-05, "loss": 0.7212, "step": 176 },
    { "epoch": 1.41, "learning_rate": 1.1423148382732854e-05, "loss": 0.7272, "step": 177 },
    { "epoch": 1.42, "learning_rate": 1.133743180868273e-05, "loss": 0.7302, "step": 178 },
    { "epoch": 1.43, "learning_rate": 1.125161506053646e-05, "loss": 0.7399, "step": 179 },
    { "epoch": 1.43, "learning_rate": 1.1165704565997593e-05, "loss": 0.7059, "step": 180 },
    { "epoch": 1.44, "learning_rate": 1.1079706759791311e-05, "loss": 0.733, "step": 181 },
    { "epoch": 1.45, "learning_rate": 1.0993628083182468e-05, "loss": 0.7086, "step": 182 },
    { "epoch": 1.46, "learning_rate": 1.0907474983493144e-05, "loss": 0.7396, "step": 183 },
    { "epoch": 1.47, "learning_rate": 1.0821253913619727e-05, "loss": 0.7405, "step": 184 },
    { "epoch": 1.47, "learning_rate": 1.0734971331549604e-05, "loss": 0.7121, "step": 185 },
    { "epoch": 1.48, "learning_rate": 1.064863369987743e-05, "loss": 0.7149, "step": 186 },
    { "epoch": 1.49, "learning_rate": 1.0562247485321116e-05, "loss": 0.7227, "step": 187 },
    { "epoch": 1.5, "learning_rate": 1.0475819158237426e-05, "loss": 0.7337, "step": 188 },
    { "epoch": 1.51, "learning_rate": 1.0389355192137379e-05, "loss": 0.7205, "step": 189 },
    { "epoch": 1.51, "learning_rate": 1.0302862063201367e-05, "loss": 0.7189, "step": 190 },
    { "epoch": 1.52, "learning_rate": 1.0216346249794087e-05, "loss": 0.7051, "step": 191 },
    { "epoch": 1.53, "learning_rate": 1.012981423197931e-05, "loss": 0.718, "step": 192 },
    { "epoch": 1.54, "learning_rate": 1.0043272491034523e-05, "loss": 0.7197, "step": 193 },
    { "epoch": 1.55, "learning_rate": 9.956727508965482e-06, "loss": 0.7101, "step": 194 },
    { "epoch": 1.55, "learning_rate": 9.870185768020694e-06, "loss": 0.7415, "step": 195 },
    { "epoch": 1.56, "learning_rate": 9.783653750205916e-06, "loss": 0.7303, "step": 196 },
    { "epoch": 1.57, "learning_rate": 9.697137936798635e-06, "loss": 0.6957, "step": 197 },
    { "epoch": 1.58, "learning_rate": 9.610644807862625e-06, "loss": 0.699, "step": 198 },
    { "epoch": 1.59, "learning_rate": 9.524180841762577e-06, "loss": 0.7184, "step": 199 },
    { "epoch": 1.59, "learning_rate": 9.437752514678888e-06, "loss": 0.7473, "step": 200 },
    { "epoch": 1.6, "learning_rate": 9.351366300122569e-06, "loss": 0.7197, "step": 201 },
    { "epoch": 1.61, "learning_rate": 9.265028668450403e-06, "loss": 0.7005, "step": 202 },
    { "epoch": 1.62, "learning_rate": 9.178746086380274e-06, "loss": 0.7279, "step": 203 },
    { "epoch": 1.63, "learning_rate": 9.092525016506858e-06, "loss": 0.7326, "step": 204 },
    { "epoch": 1.63, "learning_rate": 9.006371916817533e-06, "loss": 0.7041, "step": 205 },
    { "epoch": 1.64, "learning_rate": 8.920293240208694e-06, "loss": 0.714, "step": 206 },
    { "epoch": 1.65, "learning_rate": 8.83429543400241e-06, "loss": 0.7057, "step": 207 },
    { "epoch": 1.66, "learning_rate": 8.748384939463543e-06, "loss": 0.7059, "step": 208 },
    { "epoch": 1.67, "learning_rate": 8.662568191317273e-06, "loss": 0.7115, "step": 209 },
    { "epoch": 1.67, "learning_rate": 8.576851617267151e-06, "loss": 0.707, "step": 210 },
    { "epoch": 1.68, "learning_rate": 8.491241637513644e-06, "loss": 0.7443, "step": 211 },
    { "epoch": 1.69, "learning_rate": 8.405744664273278e-06, "loss": 0.7306, "step": 212 },
    { "epoch": 1.7, "learning_rate": 8.320367101298351e-06, "loss": 0.7078, "step": 213 },
    { "epoch": 1.71, "learning_rate": 8.235115343397295e-06, "loss": 0.7113, "step": 214 },
    { "epoch": 1.71, "learning_rate": 8.149995775955686e-06, "loss": 0.7032, "step": 215 },
    { "epoch": 1.72, "learning_rate": 8.065014774458004e-06, "loss": 0.716, "step": 216 },
    { "epoch": 1.73, "learning_rate": 7.980178704010089e-06, "loss": 0.7612, "step": 217 },
    { "epoch": 1.74, "learning_rate": 7.895493918862395e-06, "loss": 0.6952, "step": 218 },
    { "epoch": 1.75, "learning_rate": 7.810966761934053e-06, "loss": 0.7036, "step": 219 },
    { "epoch": 1.75, "learning_rate": 7.726603564337791e-06, "loss": 0.7083, "step": 220 },
    { "epoch": 1.76, "learning_rate": 7.642410644905726e-06, "loss": 0.7207, "step": 221 },
    { "epoch": 1.77, "learning_rate": 7.558394309716088e-06, "loss": 0.7083, "step": 222 },
    { "epoch": 1.78, "learning_rate": 7.474560851620873e-06, "loss": 0.7454, "step": 223 },
    { "epoch": 1.78, "learning_rate": 7.390916549774536e-06, "loss": 0.7308, "step": 224 },
    { "epoch": 1.79, "learning_rate": 7.307467669163655e-06, "loss": 0.7114, "step": 225 },
    { "epoch": 1.8, "learning_rate": 7.224220460137701e-06, "loss": 0.7021, "step": 226 },
    { "epoch": 1.81, "learning_rate": 7.141181157940859e-06, "loss": 0.7276, "step": 227 },
    { "epoch": 1.82, "learning_rate": 7.058355982245038e-06, "loss": 0.711, "step": 228 },
    { "epoch": 1.82, "learning_rate": 6.97575113668399e-06, "loss": 0.7256, "step": 229 },
    { "epoch": 1.83, "learning_rate": 6.893372808388674e-06, "loss": 0.7044, "step": 230 },
    { "epoch": 1.84, "learning_rate": 6.8112271675238154e-06, "loss": 0.6723, "step": 231 },
    { "epoch": 1.85, "learning_rate": 6.729320366825785e-06, "loss": 0.7268, "step": 232 },
    { "epoch": 1.86, "learning_rate": 6.647658541141735e-06, "loss": 0.7028, "step": 233 },
    { "epoch": 1.86, "learning_rate": 6.566247806970119e-06, "loss": 0.729, "step": 234 },
    { "epoch": 1.87, "learning_rate": 6.485094262002529e-06, "loss": 0.6692, "step": 235 },
    { "epoch": 1.88, "learning_rate": 6.404203984667019e-06, "loss": 0.6896, "step": 236 },
    { "epoch": 1.89, "learning_rate": 6.323583033672799e-06, "loss": 0.715, "step": 237 },
    { "epoch": 1.9, "learning_rate": 6.24323744755645e-06, "loss": 0.7148, "step": 238 },
    { "epoch": 1.9, "learning_rate": 6.163173244229618e-06, "loss": 0.7357, "step": 239 },
    { "epoch": 1.91, "learning_rate": 6.083396420528298e-06, "loss": 0.71, "step": 240 },
    { "epoch": 1.92, "learning_rate": 6.003912951763644e-06, "loss": 0.696, "step": 241 },
    { "epoch": 1.93, "learning_rate": 5.924728791274432e-06, "loss": 0.6842, "step": 242 },
    { "epoch": 1.94, "learning_rate": 5.845849869981137e-06, "loss": 0.7091, "step": 243 },
    { "epoch": 1.94, "learning_rate": 5.767282095941725e-06, "loss": 0.6849, "step": 244 },
    { "epoch": 1.95, "learning_rate": 5.68903135390912e-06, "loss": 0.7449, "step": 245 },
    { "epoch": 1.96, "learning_rate": 5.611103504890444e-06, "loss": 0.7074, "step": 246 },
    { "epoch": 1.97, "learning_rate": 5.533504385708024e-06, "loss": 0.7138, "step": 247 },
    { "epoch": 1.98, "learning_rate": 5.45623980856221e-06, "loss": 0.7145, "step": 248 },
    { "epoch": 1.98, "learning_rate": 5.379315560596037e-06, "loss": 0.7426, "step": 249 },
    { "epoch": 1.99, "learning_rate": 5.302737403461778e-06, "loss": 0.6741, "step": 250 },
    { "epoch": 2.0, "learning_rate": 5.226511072889371e-06, "loss": 0.6832, "step": 251 },
    { "epoch": 2.01, "learning_rate": 5.1506422782568345e-06, "loss": 0.5329, "step": 252 },
    { "epoch": 2.02, "learning_rate": 5.075136702162622e-06, "loss": 0.5358, "step": 253 },
    { "epoch": 2.02, "learning_rate": 5.000000000000003e-06, "loss": 0.5402, "step": 254 },
    { "epoch": 2.03, "learning_rate": 4.925237799533445e-06, "loss": 0.5159, "step": 255 },
    { "epoch": 2.04, "learning_rate": 4.85085570047713e-06, "loss": 0.5301, "step": 256 },
    { "epoch": 2.05, "learning_rate": 4.776859274075506e-06, "loss": 0.535, "step": 257 },
    { "epoch": 2.06, "learning_rate": 4.703254062686017e-06, "loss": 0.5051, "step": 258 },
    { "epoch": 2.06, "learning_rate": 4.6300455793639565e-06, "loss": 0.5253, "step": 259 },
    { "epoch": 2.07, "learning_rate": 4.557239307449562e-06, "loss": 0.5146, "step": 260 },
    { "epoch": 2.08, "learning_rate": 4.4848407001572945e-06, "loss": 0.5272, "step": 261 },
    { "epoch": 2.09, "learning_rate": 4.412855180167406e-06, "loss": 0.531, "step": 262 },
    { "epoch": 2.1, "learning_rate": 4.341288139219752e-06, "loss": 0.5101, "step": 263 },
    { "epoch": 2.1, "learning_rate": 4.270144937709981e-06, "loss": 0.4964, "step": 264 },
    { "epoch": 2.11, "learning_rate": 4.19943090428802e-06, "loss": 0.5403, "step": 265 },
    { "epoch": 2.12, "learning_rate": 4.1291513354589576e-06, "loss": 0.5303, "step": 266 },
    { "epoch": 2.13, "learning_rate": 4.059311495186338e-06, "loss": 0.5164, "step": 267 },
    { "epoch": 2.14, "learning_rate": 3.989916614497891e-06, "loss": 0.486, "step": 268 },
    { "epoch": 2.14, "learning_rate": 3.9209718910937174e-06, "loss": 0.4951, "step": 269 },
    { "epoch": 2.15, "learning_rate": 3.852482488956992e-06, "loss": 0.5051, "step": 270 },
    { "epoch": 2.16, "learning_rate": 3.784453537967161e-06, "loss": 0.5312, "step": 271 },
    { "epoch": 2.17, "learning_rate": 3.7168901335157313e-06, "loss": 0.481, "step": 272 },
    { "epoch": 2.18, "learning_rate": 3.6497973361246153e-06, "loss": 0.5259, "step": 273 },
    { "epoch": 2.18, "learning_rate": 3.583180171067101e-06, "loss": 0.5091, "step": 274 },
    { "epoch": 2.19, "learning_rate": 3.517043627991441e-06, "loss": 0.5071, "step": 275 },
    { "epoch": 2.2, "learning_rate": 3.4513926605471504e-06, "loss": 0.5263, "step": 276 },
    { "epoch": 2.21, "learning_rate": 3.3862321860139578e-06, "loss": 0.5056, "step": 277 },
    { "epoch": 2.22, "learning_rate": 3.3215670849335156e-06, "loss": 0.5187, "step": 278 },
    { "epoch": 2.22, "learning_rate": 3.257402200743821e-06, "loss": 0.521, "step": 279 },
    { "epoch": 2.23, "learning_rate": 3.19374233941647e-06, "loss": 0.5028, "step": 280 },
    { "epoch": 2.24, "learning_rate": 3.1305922690966705e-06, "loss": 0.4968, "step": 281 },
    { "epoch": 2.25, "learning_rate": 3.0679567197461135e-06, "loss": 0.4963, "step": 282 },
    { "epoch": 2.25, "learning_rate": 3.005840382788685e-06, "loss": 0.5082, "step": 283 },
    { "epoch": 2.26, "learning_rate": 2.944247910759097e-06, "loss": 0.5287, "step": 284 },
    { "epoch": 2.27, "learning_rate": 2.8831839169543998e-06, "loss": 0.5074, "step": 285 },
    { "epoch": 2.28, "learning_rate": 2.8226529750884403e-06, "loss": 0.5049, "step": 286 },
    { "epoch": 2.29, "learning_rate": 2.7626596189492983e-06, "loss": 0.477, "step": 287 },
    { "epoch": 2.29, "learning_rate": 2.7032083420597e-06, "loss": 0.5196, "step": 288 },
    { "epoch": 2.3, "learning_rate": 2.6443035973404497e-06, "loss": 0.5186, "step": 289 },
    { "epoch": 2.31, "learning_rate": 2.585949796776912e-06, "loss": 0.4904, "step": 290 },
    { "epoch": 2.32, "learning_rate": 2.528151311088537e-06, "loss": 0.5234, "step": 291 },
    { "epoch": 2.33, "learning_rate": 2.470912469401512e-06, "loss": 0.4933, "step": 292 },
    { "epoch": 2.33, "learning_rate": 2.414237558924496e-06, "loss": 0.5143, "step": 293 },
    { "epoch": 2.34, "learning_rate": 2.3581308246275103e-06, "loss": 0.5168, "step": 294 },
    { "epoch": 2.35, "learning_rate": 2.302596468923981e-06, "loss": 0.5049, "step": 295 },
    { "epoch": 2.36, "learning_rate": 2.247638651355991e-06, "loss": 0.517, "step": 296 },
    { "epoch": 2.37, "learning_rate": 2.1932614882827196e-06, "loss": 0.4923, "step": 297 },
    { "epoch": 2.37, "learning_rate": 2.1394690525721275e-06, "loss": 0.4897, "step": 298 },
    { "epoch": 2.38, "learning_rate": 2.0862653732958914e-06, "loss": 0.5118, "step": 299 },
    { "epoch": 2.39, "learning_rate": 2.03365443542764e-06, "loss": 0.508, "step": 300 },
    { "epoch": 2.4, "learning_rate": 1.9816401795444664e-06, "loss": 0.4983, "step": 301 },
    { "epoch": 2.41, "learning_rate": 1.93022650153178e-06, "loss": 0.5127, "step": 302 },
    { "epoch": 2.41, "learning_rate": 1.8794172522915022e-06, "loss": 0.5146, "step": 303 },
    { "epoch": 2.42, "learning_rate": 1.829216237453637e-06, "loss": 0.5289, "step": 304 },
    { "epoch": 2.43, "learning_rate": 1.7796272170912255e-06, "loss": 0.4871, "step": 305 },
    { "epoch": 2.44, "learning_rate": 1.730653905438714e-06, "loss": 0.5142, "step": 306 },
    { "epoch": 2.45, "learning_rate": 1.6822999706137565e-06, "loss": 0.5167, "step": 307 },
    { "epoch": 2.45, "learning_rate": 1.6345690343424758e-06, "loss": 0.5056, "step": 308 },
    { "epoch": 2.46, "learning_rate": 1.587464671688187e-06, "loss": 0.5192, "step": 309 },
    { "epoch": 2.47, "learning_rate": 1.540990410783636e-06, "loss": 0.5187, "step": 310 },
    { "epoch": 2.48, "learning_rate": 1.495149732566723e-06, "loss": 0.5175, "step": 311 },
    { "epoch": 2.49, "learning_rate": 1.4499460705198e-06, "loss": 0.5055, "step": 312 },
    { "epoch": 2.49, "learning_rate": 1.4053828104124867e-06, "loss": 0.5286, "step": 313 },
    { "epoch": 2.5, "learning_rate": 1.361463290048085e-06, "loss": 0.526, "step": 314 },
    { "epoch": 2.51, "learning_rate": 1.3181907990135624e-06, "loss": 0.5038, "step": 315 },
    { "epoch": 2.52, "learning_rate": 1.2755685784331784e-06, "loss": 0.4852, "step": 316 },
    { "epoch": 2.53, "learning_rate": 1.2335998207257138e-06, "loss": 0.5, "step": 317 },
    { "epoch": 2.53, "learning_rate": 1.1922876693653584e-06, "loss": 0.5182, "step": 318 },
    { "epoch": 2.54, "learning_rate": 1.1516352186462588e-06, "loss": 0.5406, "step": 319 },
    { "epoch": 2.55, "learning_rate": 1.1116455134507665e-06, "loss": 0.488, "step": 320 },
    { "epoch": 2.56, "learning_rate": 1.0723215490213635e-06, "loss": 0.5261, "step": 321 },
    { "epoch": 2.57, "learning_rate": 1.0336662707363287e-06, "loss": 0.5127, "step": 322 },
    { "epoch": 2.57, "learning_rate": 9.95682573889114e-07, "loss": 0.5145, "step": 323 },
    { "epoch": 2.58, "learning_rate": 9.583733034714982e-07, "loss": 0.4902, "step": 324 },
    { "epoch": 2.59, "learning_rate": 9.217412539604942e-07, "loss": 0.4974, "step": 325 },
    { "epoch": 2.6, "learning_rate": 8.857891691090336e-07, "loss": 0.4695, "step": 326 },
    { "epoch": 2.61, "learning_rate": 8.505197417404687e-07, "loss": 0.4944, "step": 327 },
    { "epoch": 2.61, "learning_rate": 8.159356135468721e-07, "loss": 0.4966, "step": 328 },
    { "epoch": 2.62, "learning_rate": 7.820393748911792e-07, "loss": 0.5097, "step": 329 },
    { "epoch": 2.63, "learning_rate": 7.488335646131628e-07, "loss": 0.525, "step": 330 },
    { "epoch": 2.64, "learning_rate": 7.163206698392744e-07, "loss": 0.4993, "step": 331 },
    { "epoch": 2.65, "learning_rate": 6.845031257963619e-07, "loss": 0.489, "step": 332 },
    { "epoch": 2.65, "learning_rate": 6.53383315629268e-07, "loss": 0.4964, "step": 333 },
    { "epoch": 2.66, "learning_rate": 6.229635702223325e-07, "loss": 0.5255, "step": 334 },
    { "epoch": 2.67, "learning_rate": 5.932461680248014e-07, "loss": 0.5094, "step": 335 },
    { "epoch": 2.68, "learning_rate": 5.64233334880181e-07, "loss": 0.4925, "step": 336 },
    { "epoch": 2.69, "learning_rate": 5.359272438595153e-07, "loss": 0.511, "step": 337 },
    { "epoch": 2.69, "learning_rate": 5.083300150986259e-07, "loss": 0.5087, "step": 338 },
    { "epoch": 2.7, "learning_rate": 4.814437156393048e-07, "loss": 0.5189, "step": 339 },
    { "epoch": 2.71, "learning_rate": 4.5527035927450337e-07, "loss": 0.4896, "step": 340 },
    { "epoch": 2.72, "learning_rate": 4.298119063974915e-07, "loss": 0.5004, "step": 341 },
    { "epoch": 2.73, "learning_rate": 4.0507026385502747e-07, "loss": 0.5096, "step": 342 },
    { "epoch": 2.73, "learning_rate": 3.810472848045266e-07, "loss": 0.5195, "step": 343 },
    { "epoch": 2.74, "learning_rate": 3.5774476857527107e-07, "loss": 0.5102, "step": 344 },
    { "epoch": 2.75, "learning_rate": 3.3516446053363015e-07, "loss": 0.4774, "step": 345 },
    { "epoch": 2.76, "learning_rate": 3.1330805195233684e-07, "loss": 0.5122, "step": 346 },
    { "epoch": 2.76, "learning_rate": 2.921771798838069e-07, "loss": 0.4839, "step": 347 },
    { "epoch": 2.77, "learning_rate": 2.717734270375272e-07, "loss": 0.4781, "step": 348 },
    { "epoch": 2.78, "learning_rate": 2.520983216615047e-07, "loss": 0.5338, "step": 349 },
    { "epoch": 2.79, "learning_rate": 2.3315333742780942e-07, "loss": 0.5009, "step": 350 },
    { "epoch": 2.8, "learning_rate": 2.1493989332218468e-07, "loss": 0.4998, "step": 351 },
    { "epoch": 2.8, "learning_rate": 1.9745935353777222e-07, "loss": 0.5426, "step": 352 },
    { "epoch": 2.81, "learning_rate": 1.8071302737293294e-07, "loss": 0.4944, "step": 353 },
    { "epoch": 2.82, "learning_rate": 1.6470216913317628e-07, "loss": 0.5074, "step": 354 },
    { "epoch": 2.83, "learning_rate": 1.4942797803721543e-07, "loss": 0.4987, "step": 355 },
    { "epoch": 2.84, "learning_rate": 1.348915981271437e-07, "loss": 0.5302, "step": 356 },
    { "epoch": 2.84, "learning_rate": 1.2109411818274851e-07, "loss": 0.4965, "step": 357 },
    { "epoch": 2.85, "learning_rate": 1.0803657163995896e-07, "loss": 0.5119, "step": 358 },
    { "epoch": 2.86, "learning_rate": 9.571993651343869e-08, "loss": 0.4805, "step": 359 },
    { "epoch": 2.87, "learning_rate": 8.41451353233369e-08, "loss": 0.5057, "step": 360 },
    { "epoch": 2.88, "learning_rate": 7.331303502618903e-08, "loss": 0.4891, "step": 361 },
    { "epoch": 2.88, "learning_rate": 6.32244469499832e-08, "loss": 0.5223, "step": 362 },
    { "epoch": 2.89, "learning_rate": 5.388012673338661e-08, "loss": 0.5151, "step": 363 },
    { "epoch": 2.9, "learning_rate": 4.528077426915412e-08, "loss": 0.4966, "step": 364 },
    { "epoch": 2.91, "learning_rate": 3.7427033651702414e-08, "loss": 0.5096, "step": 365 },
    { "epoch": 2.92, "learning_rate": 3.03194931288664e-08, "loss": 0.5051, "step": 366 },
    { "epoch": 2.92, "learning_rate": 2.3958685057844378e-08, "loss": 0.5117, "step": 367 },
    { "epoch": 2.93, "learning_rate": 1.83450858653178e-08, "loss": 0.4816, "step": 368 },
    { "epoch": 2.94, "learning_rate": 1.3479116011769766e-08, "loss": 0.4839, "step": 369 },
    { "epoch": 2.95, "learning_rate": 9.361139959993549e-09, "loss": 0.5347, "step": 370 },
    { "epoch": 2.96, "learning_rate": 5.991466147791114e-09, "loss": 0.5263, "step": 371 },
    { "epoch": 2.96, "learning_rate": 3.3703469648760367e-09, "loss": 0.5253, "step": 372 },
    { "epoch": 2.97, "learning_rate": 1.497978733961958e-09, "loss": 0.5517, "step": 373 },
    { "epoch": 2.98, "learning_rate": 3.745016960665648e-10, "loss": 0.5018, "step": 374 },
    { "epoch": 2.99, "learning_rate": 0.0, "loss": 0.514, "step": 375 },
    {
      "epoch": 2.99,
      "step": 375,
      "total_flos": 144378314096640.0,
      "train_loss": 0.765017323811849,
      "train_runtime": 10075.457,
      "train_samples_per_second": 19.117,
      "train_steps_per_second": 0.037
    }
  ],
  "max_steps": 375,
  "num_train_epochs": 3,
  "total_flos": 144378314096640.0,
  "trial_name": null,
  "trial_params": null
}