{
  "best_metric": 1.2186108827590942,
  "best_model_checkpoint": "wikitext_roberta-base/checkpoint-666",
  "epoch": 19.986666666666668,
  "global_step": 740,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.03, "learning_rate": 1.0000000000000002e-06, "loss": 1.8988, "step": 1 },
    { "epoch": 0.05, "learning_rate": 2.0000000000000003e-06, "loss": 1.8877, "step": 2 },
    { "epoch": 0.08, "learning_rate": 3e-06, "loss": 1.9457, "step": 3 },
    { "epoch": 0.11, "learning_rate": 4.000000000000001e-06, "loss": 1.8727, "step": 4 },
    { "epoch": 0.13, "learning_rate": 5e-06, "loss": 1.9502, "step": 5 },
    { "epoch": 0.16, "learning_rate": 6e-06, "loss": 1.911, "step": 6 },
    { "epoch": 0.19, "learning_rate": 7.000000000000001e-06, "loss": 1.925, "step": 7 },
    { "epoch": 0.21, "learning_rate": 8.000000000000001e-06, "loss": 1.8293, "step": 8 },
    { "epoch": 0.24, "learning_rate": 9e-06, "loss": 1.7548, "step": 9 },
    { "epoch": 0.27, "learning_rate": 1e-05, "loss": 1.8128, "step": 10 },
    { "epoch": 0.29, "learning_rate": 1.1000000000000001e-05, "loss": 1.7672, "step": 11 },
    { "epoch": 0.32, "learning_rate": 1.2e-05, "loss": 1.789, "step": 12 },
    { "epoch": 0.35, "learning_rate": 1.3000000000000001e-05, "loss": 1.7381, "step": 13 },
    { "epoch": 0.37, "learning_rate": 1.4000000000000001e-05, "loss": 1.6687, "step": 14 },
    { "epoch": 0.4, "learning_rate": 1.5e-05, "loss": 1.7386, "step": 15 },
    { "epoch": 0.43, "learning_rate": 1.6000000000000003e-05, "loss": 1.6627, "step": 16 },
    { "epoch": 0.45, "learning_rate": 1.7000000000000003e-05, "loss": 1.5889, "step": 17 },
    { "epoch": 0.48, "learning_rate": 1.8e-05, "loss": 1.5649, "step": 18 },
    { "epoch": 0.51, "learning_rate": 1.9e-05, "loss": 1.5465, "step": 19 },
    { "epoch": 0.53, "learning_rate": 2e-05, "loss": 1.5523, "step": 20 },
    { "epoch": 0.56, "learning_rate": 2e-05, "loss": 1.5625, "step": 21 },
    { "epoch": 0.59, "learning_rate": 2.1e-05, "loss": 1.639, "step": 22 },
    { "epoch": 0.61, "learning_rate": 2.2000000000000003e-05, "loss": 1.5218, "step": 23 },
    { "epoch": 0.64, "learning_rate": 2.3000000000000003e-05, "loss": 1.5603, "step": 24 },
    { "epoch": 0.67, "learning_rate": 2.4e-05, "loss": 1.5555, "step": 25 },
    { "epoch": 0.69, "learning_rate": 2.5e-05, "loss": 1.5245, "step": 26 },
    { "epoch": 0.72, "learning_rate": 2.6000000000000002e-05, "loss": 1.5195, "step": 27 },
    { "epoch": 0.75, "learning_rate": 2.7000000000000002e-05, "loss": 1.5227, "step": 28 },
    { "epoch": 0.77, "learning_rate": 2.8000000000000003e-05, "loss": 1.4743, "step": 29 },
    { "epoch": 0.8, "learning_rate": 2.9e-05, "loss": 1.5171, "step": 30 },
    { "epoch": 0.83, "learning_rate": 3e-05, "loss": 1.4961, "step": 31 },
    { "epoch": 0.85, "learning_rate": 3.1e-05, "loss": 1.5427, "step": 32 },
    { "epoch": 0.88, "learning_rate": 3.2000000000000005e-05, "loss": 1.4519, "step": 33 },
    { "epoch": 0.91, "learning_rate": 3.3e-05, "loss": 1.4714, "step": 34 },
    { "epoch": 0.93, "learning_rate": 3.4000000000000007e-05, "loss": 1.4477, "step": 35 },
    { "epoch": 0.96, "learning_rate": 3.5e-05, "loss": 1.4796, "step": 36 },
    { "epoch": 0.99, "learning_rate": 3.6e-05, "loss": 1.4175, "step": 37 },
    { "epoch": 0.99, "eval_accuracy": 0.7193657677192031, "eval_loss": 1.3355050086975098, "eval_runtime": 13.2653, "eval_samples_per_second": 37.391, "eval_steps_per_second": 4.674, "step": 37 },
    { "epoch": 1.03, "learning_rate": 3.7e-05, "loss": 2.1673, "step": 38 },
    { "epoch": 1.05, "learning_rate": 3.8e-05, "loss": 1.4677, "step": 39 },
    { "epoch": 1.08, "learning_rate": 3.9000000000000006e-05, "loss": 1.4678, "step": 40 },
    { "epoch": 1.11, "learning_rate": 4e-05, "loss": 1.4979, "step": 41 },
    { "epoch": 1.13, "learning_rate": 4.1e-05, "loss": 1.4639, "step": 42 },
    { "epoch": 1.16, "learning_rate": 4.2e-05, "loss": 1.4553, "step": 43 },
    { "epoch": 1.19, "learning_rate": 4.3e-05, "loss": 1.3852, "step": 44 },
    { "epoch": 1.21, "learning_rate": 4.4000000000000006e-05, "loss": 1.4783, "step": 45 },
    { "epoch": 1.24, "learning_rate": 4.5e-05, "loss": 1.416, "step": 46 },
    { "epoch": 1.27, "learning_rate": 4.600000000000001e-05, "loss": 1.4261, "step": 47 },
    { "epoch": 1.29, "learning_rate": 4.7e-05, "loss": 1.3403, "step": 48 },
    { "epoch": 1.32, "learning_rate": 4.8e-05, "loss": 1.4469, "step": 49 },
    { "epoch": 1.35, "learning_rate": 4.9e-05, "loss": 1.3988, "step": 50 },
    { "epoch": 1.37, "learning_rate": 5e-05, "loss": 1.412, "step": 51 },
    { "epoch": 1.4, "learning_rate": 4.9927536231884056e-05, "loss": 1.4766, "step": 52 },
    { "epoch": 1.43, "learning_rate": 4.985507246376812e-05, "loss": 1.4986, "step": 53 },
    { "epoch": 1.45, "learning_rate": 4.9782608695652176e-05, "loss": 1.4841, "step": 54 },
    { "epoch": 1.48, "learning_rate": 4.9710144927536237e-05, "loss": 1.4311, "step": 55 },
    { "epoch": 1.51, "learning_rate": 4.963768115942029e-05, "loss": 1.4505, "step": 56 },
    { "epoch": 1.53, "learning_rate": 4.956521739130435e-05, "loss": 1.4436, "step": 57 },
    { "epoch": 1.56, "learning_rate": 4.949275362318841e-05, "loss": 1.3686, "step": 58 },
    { "epoch": 1.59, "learning_rate": 4.9420289855072464e-05, "loss": 1.4193, "step": 59 },
    { "epoch": 1.61, "learning_rate": 4.9347826086956524e-05, "loss": 1.4409, "step": 60 },
    { "epoch": 1.64, "learning_rate": 4.9275362318840584e-05, "loss": 1.4257, "step": 61 },
    { "epoch": 1.67, "learning_rate": 4.920289855072464e-05, "loss": 1.3458, "step": 62 },
    { "epoch": 1.69, "learning_rate": 4.91304347826087e-05, "loss": 1.3916, "step": 63 },
    { "epoch": 1.72, "learning_rate": 4.905797101449275e-05, "loss": 1.3797, "step": 64 },
    { "epoch": 1.75, "learning_rate": 4.898550724637682e-05, "loss": 1.4372, "step": 65 },
    { "epoch": 1.77, "learning_rate": 4.891304347826087e-05, "loss": 1.4756, "step": 66 },
    { "epoch": 1.8, "learning_rate": 4.884057971014493e-05, "loss": 1.3883, "step": 67 },
    { "epoch": 1.83, "learning_rate": 4.8768115942028986e-05, "loss": 1.3913, "step": 68 },
    { "epoch": 1.85, "learning_rate": 4.8695652173913046e-05, "loss": 1.3826, "step": 69 },
    { "epoch": 1.88, "learning_rate": 4.8623188405797106e-05, "loss": 1.4326, "step": 70 },
    { "epoch": 1.91, "learning_rate": 4.855072463768116e-05, "loss": 1.4112, "step": 71 },
    { "epoch": 1.93, "learning_rate": 4.847826086956522e-05, "loss": 1.4015, "step": 72 },
    { "epoch": 1.96, "learning_rate": 4.840579710144928e-05, "loss": 1.3996, "step": 73 },
    { "epoch": 1.99, "learning_rate": 4.8333333333333334e-05, "loss": 1.438, "step": 74 },
    { "epoch": 1.99, "eval_accuracy": 0.7249340724395889, "eval_loss": 1.2952723503112793, "eval_runtime": 13.2107, "eval_samples_per_second": 37.545, "eval_steps_per_second": 4.693, "step": 74 },
    { "epoch": 2.03, "learning_rate": 4.8260869565217394e-05, "loss": 2.0645, "step": 75 },
    { "epoch": 2.05, "learning_rate": 4.818840579710145e-05, "loss": 1.4472, "step": 76 },
    { "epoch": 2.08, "learning_rate": 4.8115942028985514e-05, "loss": 1.3352, "step": 77 },
    { "epoch": 2.11, "learning_rate": 4.804347826086957e-05, "loss": 1.4051, "step": 78 },
    { "epoch": 2.13, "learning_rate": 4.797101449275362e-05, "loss": 1.4046, "step": 79 },
    { "epoch": 2.16, "learning_rate": 4.789855072463768e-05, "loss": 1.39, "step": 80 },
    { "epoch": 2.19, "learning_rate": 4.782608695652174e-05, "loss": 1.4223, "step": 81 },
    { "epoch": 2.21, "learning_rate": 4.77536231884058e-05, "loss": 1.3412, "step": 82 },
    { "epoch": 2.24, "learning_rate": 4.7681159420289855e-05, "loss": 1.3806, "step": 83 },
    { "epoch": 2.27, "learning_rate": 4.7608695652173916e-05, "loss": 1.4172, "step": 84 },
    { "epoch": 2.29, "learning_rate": 4.7536231884057976e-05, "loss": 1.3621, "step": 85 },
    { "epoch": 2.32, "learning_rate": 4.746376811594203e-05, "loss": 1.403, "step": 86 },
    { "epoch": 2.35, "learning_rate": 4.739130434782609e-05, "loss": 1.3762, "step": 87 },
    { "epoch": 2.37, "learning_rate": 4.731884057971015e-05, "loss": 1.3764, "step": 88 },
    { "epoch": 2.4, "learning_rate": 4.72463768115942e-05, "loss": 1.3957, "step": 89 },
    { "epoch": 2.43, "learning_rate": 4.7173913043478264e-05, "loss": 1.3773, "step": 90 },
    { "epoch": 2.45, "learning_rate": 4.710144927536232e-05, "loss": 1.3872, "step": 91 },
    { "epoch": 2.48, "learning_rate": 4.7028985507246384e-05, "loss": 1.3579, "step": 92 },
    { "epoch": 2.51, "learning_rate": 4.695652173913044e-05, "loss": 1.3718, "step": 93 },
    { "epoch": 2.53, "learning_rate": 4.68840579710145e-05, "loss": 1.3576, "step": 94 },
    { "epoch": 2.56, "learning_rate": 4.681159420289855e-05, "loss": 1.3508, "step": 95 },
    { "epoch": 2.59, "learning_rate": 4.673913043478261e-05, "loss": 1.3476, "step": 96 },
    { "epoch": 2.61, "learning_rate": 4.666666666666667e-05, "loss": 1.3608, "step": 97 },
    { "epoch": 2.64, "learning_rate": 4.6594202898550725e-05, "loss": 1.3911, "step": 98 },
    { "epoch": 2.67, "learning_rate": 4.6521739130434785e-05, "loss": 1.3748, "step": 99 },
    { "epoch": 2.69, "learning_rate": 4.6449275362318846e-05, "loss": 1.3628, "step": 100 },
    { "epoch": 2.72, "learning_rate": 4.63768115942029e-05, "loss": 1.3678, "step": 101 },
    { "epoch": 2.75, "learning_rate": 4.630434782608696e-05, "loss": 1.3286, "step": 102 },
    { "epoch": 2.77, "learning_rate": 4.623188405797101e-05, "loss": 1.3592, "step": 103 },
    { "epoch": 2.8, "learning_rate": 4.615942028985508e-05, "loss": 1.3754, "step": 104 },
    { "epoch": 2.83, "learning_rate": 4.608695652173913e-05, "loss": 1.3061, "step": 105 },
    { "epoch": 2.85, "learning_rate": 4.601449275362319e-05, "loss": 1.3843, "step": 106 },
    { "epoch": 2.88, "learning_rate": 4.594202898550725e-05, "loss": 1.3153, "step": 107 },
    { "epoch": 2.91, "learning_rate": 4.586956521739131e-05, "loss": 1.3638, "step": 108 },
    { "epoch": 2.93, "learning_rate": 4.579710144927537e-05, "loss": 1.3712, "step": 109 },
    { "epoch": 2.96, "learning_rate": 4.572463768115942e-05, "loss": 1.3601, "step": 110 },
    { "epoch": 2.99, "learning_rate": 4.565217391304348e-05, "loss": 1.4363, "step": 111 },
    { "epoch": 2.99, "eval_accuracy": 0.7276007863625347, "eval_loss": 1.2758572101593018, "eval_runtime": 13.2518, "eval_samples_per_second": 37.429, "eval_steps_per_second": 4.679, "step": 111 },
    { "epoch": 3.03, "learning_rate": 4.557971014492754e-05, "loss": 2.0676, "step": 112 },
    { "epoch": 3.05, "learning_rate": 4.5507246376811595e-05, "loss": 1.3703, "step": 113 },
    { "epoch": 3.08, "learning_rate": 4.5434782608695655e-05, "loss": 1.3295, "step": 114 },
    { "epoch": 3.11, "learning_rate": 4.5362318840579715e-05, "loss": 1.3613, "step": 115 },
    { "epoch": 3.13, "learning_rate": 4.528985507246377e-05, "loss": 1.4229, "step": 116 },
    { "epoch": 3.16, "learning_rate": 4.521739130434783e-05, "loss": 1.4115, "step": 117 },
    { "epoch": 3.19, "learning_rate": 4.514492753623188e-05, "loss": 1.4115, "step": 118 },
    { "epoch": 3.21, "learning_rate": 4.507246376811595e-05, "loss": 1.3827, "step": 119 },
    { "epoch": 3.24, "learning_rate": 4.5e-05, "loss": 1.338, "step": 120 },
    { "epoch": 3.27, "learning_rate": 4.492753623188406e-05, "loss": 1.434, "step": 121 },
    { "epoch": 3.29, "learning_rate": 4.4855072463768117e-05, "loss": 1.3443, "step": 122 },
    { "epoch": 3.32, "learning_rate": 4.478260869565218e-05, "loss": 1.3195, "step": 123 },
    { "epoch": 3.35, "learning_rate": 4.471014492753624e-05, "loss": 1.3589, "step": 124 },
    { "epoch": 3.37, "learning_rate": 4.463768115942029e-05, "loss": 1.3323, "step": 125 },
    { "epoch": 3.4, "learning_rate": 4.456521739130435e-05, "loss": 1.3151, "step": 126 },
    { "epoch": 3.43, "learning_rate": 4.449275362318841e-05, "loss": 1.3844, "step": 127 },
    { "epoch": 3.45, "learning_rate": 4.4420289855072464e-05, "loss": 1.3213, "step": 128 },
    { "epoch": 3.48, "learning_rate": 4.4347826086956525e-05, "loss": 1.3764, "step": 129 },
    { "epoch": 3.51, "learning_rate": 4.427536231884058e-05, "loss": 1.3449, "step": 130 },
    { "epoch": 3.53, "learning_rate": 4.4202898550724645e-05, "loss": 1.3704, "step": 131 },
    { "epoch": 3.56, "learning_rate": 4.41304347826087e-05, "loss": 1.3712, "step": 132 },
    { "epoch": 3.59, "learning_rate": 4.405797101449275e-05, "loss": 1.3191, "step": 133 },
    { "epoch": 3.61, "learning_rate": 4.398550724637681e-05, "loss": 1.3795, "step": 134 },
    { "epoch": 3.64, "learning_rate": 4.391304347826087e-05, "loss": 1.3025, "step": 135 },
    { "epoch": 3.67, "learning_rate": 4.384057971014493e-05, "loss": 1.3285, "step": 136 },
    { "epoch": 3.69, "learning_rate": 4.3768115942028986e-05, "loss": 1.3112, "step": 137 },
    { "epoch": 3.72, "learning_rate": 4.3695652173913046e-05, "loss": 1.4008, "step": 138 },
    { "epoch": 3.75, "learning_rate": 4.362318840579711e-05, "loss": 1.3598, "step": 139 },
    { "epoch": 3.77, "learning_rate": 4.355072463768116e-05, "loss": 1.3402, "step": 140 },
    { "epoch": 3.8, "learning_rate": 4.347826086956522e-05, "loss": 1.3282, "step": 141 },
    { "epoch": 3.83, "learning_rate": 4.3405797101449274e-05, "loss": 1.4, "step": 142 },
    { "epoch": 3.85, "learning_rate": 4.3333333333333334e-05, "loss": 1.276, "step": 143 },
    { "epoch": 3.88, "learning_rate": 4.3260869565217394e-05, "loss": 1.3375, "step": 144 },
    { "epoch": 3.91, "learning_rate": 4.318840579710145e-05, "loss": 1.3169, "step": 145 },
    { "epoch": 3.93, "learning_rate": 4.3115942028985515e-05, "loss": 1.3835, "step": 146 },
    { "epoch": 3.96, "learning_rate": 4.304347826086957e-05, "loss": 1.3977, "step": 147 },
    { "epoch": 3.99, "learning_rate": 4.297101449275363e-05, "loss": 1.3391, "step": 148 },
    { "epoch": 3.99, "eval_accuracy": 0.7251519097222222, "eval_loss": 1.2903902530670166, "eval_runtime": 13.26, "eval_samples_per_second": 37.406, "eval_steps_per_second": 4.676, "step": 148 },
    { "epoch": 4.03, "learning_rate": 4.289855072463768e-05, "loss": 2.05, "step": 149 },
    { "epoch": 4.05, "learning_rate": 4.282608695652174e-05, "loss": 1.3425, "step": 150 },
    { "epoch": 4.08, "learning_rate": 4.27536231884058e-05, "loss": 1.3443, "step": 151 },
    { "epoch": 4.11, "learning_rate": 4.2681159420289856e-05, "loss": 1.3464, "step": 152 },
    { "epoch": 4.13, "learning_rate": 4.2608695652173916e-05, "loss": 1.355, "step": 153 },
    { "epoch": 4.16, "learning_rate": 4.2536231884057976e-05, "loss": 1.3285, "step": 154 },
    { "epoch": 4.19, "learning_rate": 4.246376811594203e-05, "loss": 1.3246, "step": 155 },
    { "epoch": 4.21, "learning_rate": 4.239130434782609e-05, "loss": 1.3213, "step": 156 },
    { "epoch": 4.24, "learning_rate": 4.2318840579710143e-05, "loss": 1.297, "step": 157 },
    { "epoch": 4.27, "learning_rate": 4.224637681159421e-05, "loss": 1.3569, "step": 158 },
    { "epoch": 4.29, "learning_rate": 4.2173913043478264e-05, "loss": 1.3392, "step": 159 },
    { "epoch": 4.32, "learning_rate": 4.210144927536232e-05, "loss": 1.2817, "step": 160 },
    { "epoch": 4.35, "learning_rate": 4.202898550724638e-05, "loss": 1.3187, "step": 161 },
    { "epoch": 4.37, "learning_rate": 4.195652173913044e-05, "loss": 1.3094, "step": 162 },
    { "epoch": 4.4, "learning_rate": 4.18840579710145e-05, "loss": 1.4001, "step": 163 },
    { "epoch": 4.43, "learning_rate": 4.181159420289855e-05, "loss": 1.3204, "step": 164 },
    { "epoch": 4.45, "learning_rate": 4.1739130434782605e-05, "loss": 1.3482, "step": 165 },
    { "epoch": 4.48, "learning_rate": 4.166666666666667e-05, "loss": 1.3674, "step": 166 },
    { "epoch": 4.51, "learning_rate": 4.1594202898550726e-05, "loss": 1.3334, "step": 167 },
    { "epoch": 4.53, "learning_rate": 4.1521739130434786e-05, "loss": 1.3333, "step": 168 },
    { "epoch": 4.56, "learning_rate": 4.144927536231884e-05, "loss": 1.3723, "step": 169 },
    { "epoch": 4.59, "learning_rate": 4.13768115942029e-05, "loss": 1.3502, "step": 170 },
    { "epoch": 4.61, "learning_rate": 4.130434782608696e-05, "loss": 1.3256, "step": 171 },
    { "epoch": 4.64, "learning_rate": 4.123188405797101e-05, "loss": 1.4001, "step": 172 },
    { "epoch": 4.67, "learning_rate": 4.115942028985507e-05, "loss": 1.3288, "step": 173 },
    { "epoch": 4.69, "learning_rate": 4.1086956521739134e-05, "loss": 1.31, "step": 174 },
    { "epoch": 4.72, "learning_rate": 4.101449275362319e-05, "loss": 1.3456, "step": 175 },
    { "epoch": 4.75, "learning_rate": 4.094202898550725e-05, "loss": 1.3317, "step": 176 },
    { "epoch": 4.77, "learning_rate": 4.086956521739131e-05, "loss": 1.3362, "step": 177 },
    { "epoch": 4.8, "learning_rate": 4.079710144927537e-05, "loss": 1.3092, "step": 178 },
    { "epoch": 4.83, "learning_rate": 4.072463768115942e-05, "loss": 1.2915, "step": 179 },
    { "epoch": 4.85, "learning_rate": 4.065217391304348e-05, "loss": 1.3801, "step": 180 },
    { "epoch": 4.88, "learning_rate": 4.057971014492754e-05, "loss": 1.2969, "step": 181 },
    { "epoch": 4.91, "learning_rate": 4.0507246376811595e-05, "loss": 1.3184, "step": 182 },
    { "epoch": 4.93, "learning_rate": 4.0434782608695655e-05, "loss": 1.2744, "step": 183 },
    { "epoch": 4.96, "learning_rate": 4.036231884057971e-05, "loss": 1.3823, "step": 184 },
    { "epoch": 4.99, "learning_rate": 4.028985507246377e-05, "loss": 1.3741, "step": 185 },
    { "epoch": 4.99, "eval_accuracy": 0.7290188004551119, "eval_loss": 1.2620676755905151, "eval_runtime": 13.2557, "eval_samples_per_second": 37.418, "eval_steps_per_second": 4.677, "step": 185 },
    { "epoch": 5.03, "learning_rate": 4.021739130434783e-05, "loss": 1.9576, "step": 186 },
    { "epoch": 5.05, "learning_rate": 4.014492753623188e-05, "loss": 1.4131, "step": 187 },
    { "epoch": 5.08, "learning_rate": 4.007246376811594e-05, "loss": 1.308, "step": 188 },
    { "epoch": 5.11, "learning_rate": 4e-05, "loss": 1.3495, "step": 189 },
    { "epoch": 5.13, "learning_rate": 3.9927536231884064e-05, "loss": 1.2944, "step": 190 },
    { "epoch": 5.16, "learning_rate": 3.985507246376812e-05, "loss": 1.3534, "step": 191 },
    { "epoch": 5.19, "learning_rate": 3.978260869565217e-05, "loss": 1.3448, "step": 192 },
    { "epoch": 5.21, "learning_rate": 3.971014492753624e-05, "loss": 1.3493, "step": 193 },
    { "epoch": 5.24, "learning_rate": 3.963768115942029e-05, "loss": 1.3033, "step": 194 },
    { "epoch": 5.27, "learning_rate": 3.956521739130435e-05, "loss": 1.3327, "step": 195 },
    { "epoch": 5.29, "learning_rate": 3.9492753623188405e-05, "loss": 1.3037, "step": 196 },
    { "epoch": 5.32, "learning_rate": 3.9420289855072465e-05, "loss": 1.2676, "step": 197 },
    { "epoch": 5.35, "learning_rate": 3.9347826086956525e-05, "loss": 1.277, "step": 198 },
    { "epoch": 5.37, "learning_rate": 3.927536231884058e-05, "loss": 1.306, "step": 199 },
    { "epoch": 5.4, "learning_rate": 3.920289855072464e-05, "loss": 1.3317, "step": 200 },
    { "epoch": 5.43, "learning_rate": 3.91304347826087e-05, "loss": 1.2872, "step": 201 },
    { "epoch": 5.45, "learning_rate": 3.905797101449275e-05, "loss": 1.2878, "step": 202 },
    { "epoch": 5.48, "learning_rate": 3.898550724637681e-05, "loss": 1.3232, "step": 203 },
    { "epoch": 5.51, "learning_rate": 3.8913043478260866e-05, "loss": 1.2771, "step": 204 },
    { "epoch": 5.53, "learning_rate": 3.884057971014493e-05, "loss": 1.3035, "step": 205 },
    { "epoch": 5.56, "learning_rate": 3.876811594202899e-05, "loss": 1.3175, "step": 206 },
    { "epoch": 5.59, "learning_rate": 3.869565217391305e-05, "loss": 1.2661, "step": 207 },
    { "epoch": 5.61, "learning_rate": 3.862318840579711e-05, "loss": 1.3559, "step": 208 },
    { "epoch": 5.64, "learning_rate": 3.855072463768116e-05, "loss": 1.3458, "step": 209 },
    { "epoch": 5.67, "learning_rate": 3.847826086956522e-05, "loss": 1.2947, "step": 210 },
    { "epoch": 5.69, "learning_rate": 3.8405797101449274e-05, "loss": 1.2938, "step": 211 },
    { "epoch": 5.72, "learning_rate": 3.8333333333333334e-05, "loss": 1.3266, "step": 212 },
    { "epoch": 5.75, "learning_rate": 3.8260869565217395e-05, "loss": 1.2853, "step": 213 },
    { "epoch": 5.77, "learning_rate": 3.818840579710145e-05, "loss": 1.3009, "step": 214 },
    { "epoch": 5.8, "learning_rate": 3.811594202898551e-05, "loss": 1.3023, "step": 215 },
    { "epoch": 5.83, "learning_rate": 3.804347826086957e-05, "loss": 1.3105, "step": 216 },
    { "epoch": 5.85, "learning_rate": 3.797101449275363e-05, "loss": 1.343, "step": 217 },
    { "epoch": 5.88, "learning_rate": 3.789855072463768e-05, "loss": 1.2957, "step": 218 },
    { "epoch": 5.91, "learning_rate": 3.7826086956521736e-05, "loss": 1.307, "step": 219 },
    { "epoch": 5.93, "learning_rate": 3.77536231884058e-05, "loss": 1.3477, "step": 220 },
    { "epoch": 5.96, "learning_rate": 3.7681159420289856e-05, "loss": 1.347, "step": 221 },
    { "epoch": 5.99, "learning_rate": 3.7608695652173917e-05, "loss": 1.2771, "step": 222 },
    { "epoch": 5.99, "eval_accuracy": 0.7353204415394212, "eval_loss": 1.2311729192733765, "eval_runtime": 13.2727, "eval_samples_per_second": 37.37, "eval_steps_per_second": 4.671, "step": 222 },
    { "epoch": 6.03, "learning_rate": 3.753623188405797e-05, "loss": 1.9712, "step": 223 },
    { "epoch": 6.05, "learning_rate": 3.746376811594203e-05, "loss": 1.2942, "step": 224 },
    { "epoch": 6.08, "learning_rate": 3.739130434782609e-05, "loss": 1.3117, "step": 225 },
    { "epoch": 6.11, "learning_rate": 3.7318840579710144e-05, "loss": 1.3395, "step": 226 },
    { "epoch": 6.13, "learning_rate": 3.7246376811594204e-05, "loss": 1.3447, "step": 227 },
    { "epoch": 6.16, "learning_rate": 3.7173913043478264e-05, "loss": 1.2895, "step": 228 },
    { "epoch": 6.19, "learning_rate": 3.710144927536232e-05, "loss": 1.2623, "step": 229 },
    { "epoch": 6.21, "learning_rate": 3.702898550724638e-05, "loss": 1.2987, "step": 230 },
    { "epoch": 6.24, "learning_rate": 3.695652173913043e-05, "loss": 1.3021, "step": 231 },
    { "epoch": 6.27, "learning_rate": 3.68840579710145e-05, "loss": 1.3305, "step": 232 },
    { "epoch": 6.29, "learning_rate": 3.681159420289855e-05, "loss": 1.2959, "step": 233 },
    { "epoch": 6.32, "learning_rate": 3.673913043478261e-05, "loss": 1.2879, "step": 234 },
    { "epoch": 6.35, "learning_rate": 3.6666666666666666e-05, "loss": 1.348, "step": 235 },
    { "epoch": 6.37, "learning_rate": 3.6594202898550726e-05, "loss": 1.3434, "step": 236 },
    { "epoch": 6.4, "learning_rate": 3.6521739130434786e-05, "loss": 1.3034, "step": 237 },
    { "epoch": 6.43, "learning_rate": 3.644927536231884e-05, "loss": 1.3372, "step": 238 },
    { "epoch": 6.45, "learning_rate": 3.63768115942029e-05, "loss": 1.2488, "step": 239 },
    { "epoch": 6.48, "learning_rate": 3.630434782608696e-05, "loss": 1.2779, "step": 240 },
    { "epoch": 6.51, "learning_rate": 3.6231884057971014e-05, "loss": 1.3044, "step": 241 },
    { "epoch": 6.53, "learning_rate": 3.6159420289855074e-05, "loss": 1.3089, "step": 242 },
    { "epoch": 6.56, "learning_rate": 3.6086956521739134e-05, "loss": 1.3803, "step": 243 },
    { "epoch": 6.59, "learning_rate": 3.6014492753623194e-05, "loss": 1.3559, "step": 244 },
    { "epoch": 6.61, "learning_rate": 3.594202898550725e-05, "loss": 1.3486, "step": 245 },
    { "epoch": 6.64, "learning_rate": 3.58695652173913e-05, "loss": 1.2817, "step": 246 },
    { "epoch": 6.67, "learning_rate": 3.579710144927537e-05, "loss": 1.2304, "step": 247 },
    { "epoch": 6.69, "learning_rate": 3.572463768115942e-05, "loss": 1.3637, "step": 248 },
    { "epoch": 6.72, "learning_rate": 3.565217391304348e-05, "loss": 1.3177, "step": 249 },
    { "epoch": 6.75, "learning_rate": 3.5579710144927535e-05, "loss": 1.3273, "step": 250 },
    { "epoch": 6.77, "learning_rate": 3.5507246376811596e-05, "loss": 1.3522, "step": 251 },
    { "epoch": 6.8, "learning_rate": 3.5434782608695656e-05, "loss": 1.3314, "step": 252 },
    { "epoch": 6.83, "learning_rate": 3.536231884057971e-05, "loss": 1.2812, "step": 253 },
    { "epoch": 6.85, "learning_rate": 3.528985507246377e-05, "loss": 1.2961, "step": 254 },
    { "epoch": 6.88, "learning_rate": 3.521739130434783e-05, "loss": 1.358, "step": 255 },
    { "epoch": 6.91, "learning_rate": 3.514492753623188e-05, "loss": 1.2733, "step": 256 },
    { "epoch": 6.93, "learning_rate": 3.5072463768115943e-05, "loss": 1.2509, "step": 257 },
    { "epoch": 6.96, "learning_rate": 3.5e-05, "loss": 1.2686, "step": 258 },
    { "epoch": 6.99, "learning_rate": 3.4927536231884064e-05, "loss": 1.287, "step": 259 },
    { "epoch": 6.99, "eval_accuracy": 0.7288652772101893, "eval_loss": 1.2542475461959839, "eval_runtime": 13.3151, "eval_samples_per_second": 37.251, "eval_steps_per_second": 4.656, "step": 259 },
    { "epoch": 7.03, "learning_rate": 3.485507246376812e-05, "loss": 2.0198, "step": 260 },
    { "epoch": 7.05, "learning_rate": 3.478260869565218e-05, "loss": 1.3153, "step": 261 },
    { "epoch": 7.08, "learning_rate": 3.471014492753623e-05, "loss": 1.2692, "step": 262 },
    { "epoch": 7.11, "learning_rate": 3.463768115942029e-05, "loss": 1.327, "step": 263 },
    { "epoch": 7.13, "learning_rate": 3.456521739130435e-05, "loss": 1.2767, "step": 264 },
    { "epoch": 7.16, "learning_rate": 3.4492753623188405e-05, "loss": 1.3097, "step": 265 },
    { "epoch": 7.19, "learning_rate": 3.4420289855072465e-05, "loss": 1.2951, "step": 266 },
    { "epoch": 7.21, "learning_rate": 3.4347826086956526e-05, "loss": 1.2827, "step": 267 },
    { "epoch": 7.24, "learning_rate": 3.427536231884058e-05, "loss": 1.2769, "step": 268 },
    { "epoch": 7.27, "learning_rate": 3.420289855072464e-05, "loss": 1.3052, "step": 269 },
    { "epoch": 7.29, "learning_rate": 3.413043478260869e-05, "loss": 1.3424, "step": 270 },
    { "epoch": 7.32, "learning_rate": 3.405797101449276e-05, "loss": 1.3514, "step": 271 },
    { "epoch": 7.35, "learning_rate": 3.398550724637681e-05, "loss": 1.3662, "step": 272 },
    { "epoch": 7.37, "learning_rate": 3.3913043478260867e-05, "loss": 1.3694, "step": 273 },
    { "epoch": 7.4, "learning_rate": 3.3840579710144934e-05, "loss": 1.2747, "step": 274 },
    { "epoch": 7.43, "learning_rate": 3.376811594202899e-05, "loss": 1.3502, "step": 275 },
    { "epoch": 7.45, "learning_rate": 3.369565217391305e-05, "loss": 1.2687, "step": 276 },
    { "epoch": 7.48, "learning_rate": 3.36231884057971e-05, "loss": 1.2702, "step": 277 },
    { "epoch": 7.51, "learning_rate": 3.355072463768116e-05, "loss": 1.2983, "step": 278 },
    { "epoch": 7.53, "learning_rate": 3.347826086956522e-05, "loss": 1.3027, "step": 279 },
    { "epoch": 7.56, "learning_rate": 3.3405797101449275e-05, "loss": 1.2854, "step": 280 },
    { "epoch": 7.59, "learning_rate": 3.3333333333333335e-05, "loss": 1.2679, "step": 281 },
    { "epoch": 7.61, "learning_rate": 3.3260869565217395e-05, "loss": 1.379, "step": 282 },
    { "epoch": 7.64, "learning_rate": 3.318840579710145e-05, "loss": 1.3008, "step": 283 },
    { "epoch": 7.67, "learning_rate": 3.311594202898551e-05, "loss": 1.343, "step": 284 },
    { "epoch": 7.69, "learning_rate": 3.304347826086956e-05, "loss": 1.266, "step": 285 },
    { "epoch": 7.72, "learning_rate": 3.297101449275363e-05, "loss": 1.3153, "step": 286 },
    { "epoch": 7.75, "learning_rate": 3.289855072463768e-05, "loss": 1.2899, "step": 287 },
    { "epoch": 7.77, "learning_rate": 3.282608695652174e-05, "loss": 1.2609, "step": 288 },
    { "epoch": 7.8, "learning_rate": 3.2753623188405796e-05, "loss": 1.3002, "step": 289 },
    { "epoch": 7.83, "learning_rate": 3.268115942028986e-05, "loss": 1.2858, "step": 290 },
    { "epoch": 7.85, "learning_rate": 3.260869565217392e-05, "loss": 1.3049, "step": 291 },
    { "epoch": 7.88, "learning_rate": 3.253623188405797e-05, "loss": 1.2891, "step": 292 },
    { "epoch": 7.91, "learning_rate": 3.246376811594203e-05, "loss": 1.209, "step": 293 },
    { "epoch": 7.93, "learning_rate": 3.239130434782609e-05, "loss": 1.2867, "step": 294 },
    { "epoch": 7.96, "learning_rate": 3.2318840579710144e-05, "loss": 1.2934, "step": 295 },
    { "epoch": 7.99, "learning_rate": 3.2246376811594205e-05, "loss": 1.29, "step": 296 },
    { "epoch": 7.99, "eval_accuracy": 0.7345346311640254, "eval_loss": 1.2290480136871338, "eval_runtime": 13.2843, "eval_samples_per_second": 37.337, "eval_steps_per_second": 4.667, "step": 296 },
    { "epoch": 8.03, "learning_rate": 3.217391304347826e-05, "loss": 1.9383, "step": 297 },
    { "epoch": 8.05, "learning_rate": 3.2101449275362325e-05, "loss": 1.2926, "step": 298 },
    { "epoch": 8.08, "learning_rate": 3.202898550724638e-05, "loss": 1.3316, "step": 299 },
    { "epoch": 8.11, "learning_rate": 3.195652173913043e-05, "loss": 1.2614, "step": 300 },
    { "epoch": 8.13, "learning_rate": 3.188405797101449e-05, "loss": 1.316, "step": 301 },
    { "epoch": 8.16, "learning_rate": 3.181159420289855e-05, "loss": 1.2777, "step": 302 },
    { "epoch": 8.19, "learning_rate": 3.173913043478261e-05, "loss": 1.3079, "step": 303 },
    { "epoch": 8.21, "learning_rate": 3.1666666666666666e-05, "loss": 1.3451, "step": 304 },
    { "epoch": 8.24, "learning_rate": 3.1594202898550726e-05, "loss": 1.2871, "step": 305 },
    { "epoch": 8.27, "learning_rate": 3.152173913043479e-05, "loss": 1.3431, "step": 306 },
    { "epoch": 8.29, "learning_rate": 3.144927536231884e-05, "loss": 1.2507, "step": 307 },
    { "epoch": 8.32, "learning_rate": 3.13768115942029e-05, "loss": 1.292, "step": 308 },
    { "epoch": 8.35, "learning_rate": 3.130434782608696e-05, "loss": 1.2764, "step": 309 },
    { "epoch": 8.37, "learning_rate": 3.1231884057971014e-05, "loss": 1.3385, "step": 310 },
    { "epoch": 8.4, "learning_rate": 3.1159420289855074e-05, "loss": 1.3285, "step": 311 },
    { "epoch": 8.43, "learning_rate": 3.108695652173913e-05, "loss": 1.2385, "step": 312 },
    { "epoch": 8.45, "learning_rate": 3.1014492753623195e-05, "loss": 1.2528, "step": 313 },
    { "epoch": 8.48, "learning_rate": 3.094202898550725e-05, "loss": 1.3026, "step": 314 },
    { "epoch": 8.51, "learning_rate": 3.086956521739131e-05, "loss": 1.3108, "step": 315 },
    { "epoch": 8.53, "learning_rate": 3.079710144927536e-05, "loss": 1.2307, "step": 316 },
    { "epoch": 8.56, "learning_rate": 3.072463768115942e-05, "loss": 1.2586, "step": 317 },
    { "epoch": 8.59, "learning_rate": 3.065217391304348e-05, "loss": 1.3263, "step": 318 },
    { "epoch": 8.61, "learning_rate": 3.0579710144927536e-05, "loss": 1.2522, "step": 319 },
    { "epoch": 8.64, "learning_rate": 3.0507246376811593e-05, "loss": 1.2695, "step": 320 },
    { "epoch": 8.67, "learning_rate": 3.0434782608695656e-05, "loss": 1.2588, "step": 321 },
    { "epoch": 8.69, "learning_rate": 3.0362318840579713e-05, "loss": 1.2759, "step": 322 },
    { "epoch": 8.72, "learning_rate": 3.028985507246377e-05, "loss": 1.3229, "step": 323 },
    { "epoch": 8.75, "learning_rate": 3.0217391304347827e-05, "loss": 1.2949, "step": 324 },
    { "epoch": 8.77, "learning_rate": 3.0144927536231887e-05, "loss": 1.3185, "step": 325 },
    { "epoch": 8.8, "learning_rate": 3.0072463768115944e-05, "loss": 1.2303, "step": 326 },
    { "epoch": 8.83, "learning_rate": 3e-05, "loss": 1.2309, "step": 327 },
    { "epoch": 8.85, "learning_rate": 2.9927536231884058e-05, "loss": 1.2804, "step": 328 },
    { "epoch": 8.88, "learning_rate": 2.9855072463768118e-05, "loss": 1.2964, "step": 329 },
    { "epoch": 8.91, "learning_rate": 2.9782608695652175e-05, "loss": 1.2547, "step": 330 },
    { "epoch": 8.93, "learning_rate": 2.971014492753623e-05, "loss": 1.2711, "step": 331 },
    { "epoch": 8.96, "learning_rate": 2.963768115942029e-05, "loss": 1.2578, "step": 332 },
    { "epoch": 8.99, "learning_rate": 2.9565217391304352e-05, "loss": 1.2948, "step": 333 },
    { "epoch": 8.99, "eval_accuracy": 0.7286482668694991, "eval_loss": 1.2536934614181519, "eval_runtime": 13.2456, "eval_samples_per_second": 37.446, "eval_steps_per_second": 4.681, "step": 333 },
    { "epoch": 9.03, "learning_rate": 2.949275362318841e-05, "loss": 1.9494, "step": 334 },
    { "epoch": 9.05, "learning_rate": 2.9420289855072462e-05, "loss": 1.2457, "step": 335 },
    { "epoch": 9.08, "learning_rate": 2.9347826086956526e-05, "loss": 1.2984, "step": 336 },
    { "epoch": 9.11, "learning_rate": 2.9275362318840583e-05, "loss": 1.315, "step": 337 },
    { "epoch": 9.13, "learning_rate": 2.920289855072464e-05, "loss": 1.2701, "step": 338 },
    { "epoch": 9.16, "learning_rate": 2.9130434782608696e-05, "loss": 1.2902, "step": 339 },
    { "epoch": 9.19, "learning_rate": 2.9057971014492757e-05, "loss": 1.3615, "step": 340 },
    { "epoch": 9.21, "learning_rate": 2.8985507246376814e-05, "loss": 1.2761, "step": 341 },
    { "epoch": 9.24, "learning_rate": 2.891304347826087e-05, "loss": 1.2678, "step": 342 },
    { "epoch": 9.27, "learning_rate": 2.8840579710144927e-05, "loss": 1.2982, "step": 343 },
    { "epoch": 9.29, "learning_rate": 2.8768115942028988e-05, "loss": 1.2626, "step": 344 },
    { "epoch": 9.32, "learning_rate": 2.8695652173913044e-05, "loss": 1.2179, "step": 345 },
    { "epoch": 9.35, "learning_rate": 2.86231884057971e-05, "loss": 1.2992, "step": 346 },
    { "epoch": 9.37, "learning_rate": 2.8550724637681158e-05, "loss": 1.3423, "step": 347 },
    { "epoch": 9.4, "learning_rate": 2.847826086956522e-05, "loss": 1.2727, "step": 348 },
    { "epoch": 9.43, "learning_rate": 2.840579710144928e-05, "loss": 1.2516, "step": 349 },
    { "epoch": 9.45, "learning_rate": 2.8333333333333335e-05, "loss": 1.259, "step": 350 },
    { "epoch": 9.48, "learning_rate": 2.826086956521739e-05, "loss": 1.28, "step": 351 },
    { "epoch": 9.51, "learning_rate": 2.8188405797101452e-05, "loss": 1.2764, "step": 352 },
    { "epoch": 9.53, "learning_rate": 2.811594202898551e-05, "loss": 1.2954, "step": 353 },
    { "epoch": 9.56, "learning_rate": 2.8043478260869566e-05, "loss": 1.3081, "step": 354 },
    { "epoch": 9.59, "learning_rate": 2.7971014492753623e-05, "loss": 1.3355, "step": 355 },
    { "epoch": 9.61, "learning_rate": 2.7898550724637683e-05, "loss": 1.3269, "step": 356 },
    { "epoch": 9.64, "learning_rate": 2.782608695652174e-05, "loss": 1.2538, "step": 357 },
    { "epoch": 9.67, "learning_rate": 2.7753623188405797e-05, "loss": 1.2652, "step": 358 },
    { "epoch": 9.69, "learning_rate": 2.7681159420289854e-05, "loss": 1.2239, "step": 359 },
    { "epoch": 9.72, "learning_rate": 2.7608695652173917e-05, "loss": 1.2764, "step": 360 },
    { "epoch": 9.75, "learning_rate": 2.753623188405797e-05, "loss": 1.2484, "step": 361 },
    { "epoch": 9.77, "learning_rate": 2.7463768115942028e-05, "loss": 1.3045, "step": 362 },
    { "epoch": 9.8, "learning_rate": 2.7391304347826085e-05, "loss": 1.2679, "step": 363 },
    { "epoch": 9.83, "learning_rate": 2.7318840579710148e-05, "loss": 1.3149, "step": 364 },
    { "epoch": 9.85, "learning_rate": 2.7246376811594205e-05, "loss": 1.2347, "step": 365 },
    { "epoch": 9.88, "learning_rate": 2.7173913043478262e-05, "loss": 1.2483, "step": 366 },
    { "epoch": 9.91, "learning_rate": 2.7101449275362322e-05, "loss": 1.2693, "step": 367 },
    { "epoch": 9.93, "learning_rate": 2.702898550724638e-05, "loss": 1.2849, "step": 368 },
    { "epoch": 9.96, "learning_rate": 2.6956521739130436e-05, "loss": 1.2808, "step": 369 },
    { "epoch": 9.99, "learning_rate": 2.6884057971014493e-05, "loss": 1.2741, "step": 370 },
    { "epoch": 9.99, "eval_accuracy": 0.7354103508012126, "eval_loss": 1.2199150323867798, "eval_runtime": 13.2633, "eval_samples_per_second": 37.397, "eval_steps_per_second": 4.675, "step": 370 },
    { "epoch": 10.03, "learning_rate": 2.6811594202898553e-05, "loss": 1.8651, "step": 371 },
    { "epoch": 10.05, "learning_rate": 2.673913043478261e-05, "loss": 1.3189, "step": 372 },
    { "epoch": 10.08, "learning_rate": 2.6666666666666667e-05, "loss": 1.2528, "step": 373 },
    { "epoch": 10.11, "learning_rate": 2.6594202898550723e-05, "loss": 1.319, "step": 374 },
    { "epoch": 10.13, "learning_rate": 2.6521739130434787e-05, "loss": 1.2792, "step": 375 },
    { "epoch": 10.16, "learning_rate": 2.6449275362318844e-05, "loss": 1.2567, "step": 376 },
    { "epoch": 10.19, "learning_rate": 2.63768115942029e-05, "loss": 1.2522, "step": 377 },
    { "epoch": 10.21, "learning_rate": 2.6304347826086954e-05, "loss": 1.2899, "step": 378 },
    { "epoch": 10.24, "learning_rate": 2.6231884057971018e-05, "loss": 1.2804, "step": 379 },
    { "epoch": 10.27, "learning_rate": 2.6159420289855075e-05, "loss": 1.2721, "step": 380 },
    { "epoch": 10.29, "learning_rate": 2.608695652173913e-05, "loss": 1.2901, "step": 381 },
    { "epoch": 10.32, "learning_rate": 2.601449275362319e-05, "loss": 1.3134, "step": 382 },
    { "epoch": 10.35, "learning_rate": 2.594202898550725e-05, "loss": 1.2504, "step": 383 },
    { "epoch": 10.37, "learning_rate": 2.5869565217391305e-05, "loss": 1.3061, "step": 384 },
    { "epoch": 10.4, "learning_rate": 2.5797101449275362e-05, "loss": 1.2553, "step": 385 },
    { "epoch": 10.43, "learning_rate": 2.572463768115942e-05, "loss": 1.2534, "step": 386 },
    { "epoch": 10.45, "learning_rate": 2.5652173913043483e-05, "loss": 1.2761, "step": 387 },
    { "epoch": 10.48, "learning_rate": 2.5579710144927536e-05, "loss": 1.3338, "step": 388 },
    { "epoch": 10.51, "learning_rate": 2.5507246376811593e-05, "loss": 1.256, "step": 389 },
    { "epoch": 10.53, "learning_rate": 2.543478260869565e-05, "loss": 1.1987, "step": 390 },
    { "epoch": 10.56, "learning_rate": 2.5362318840579714e-05, "loss": 1.2809, "step": 391 },
    { "epoch": 10.59, "learning_rate": 2.528985507246377e-05, "loss": 1.238, "step": 392 },
    { "epoch": 10.61, "learning_rate": 2.5217391304347827e-05, "loss": 1.2326, "step": 393 },
    { "epoch": 10.64, "learning_rate": 2.5144927536231884e-05, "loss": 1.2594, "step": 394 },
    { "epoch": 10.67, "learning_rate": 2.5072463768115944e-05, "loss": 1.278, "step": 395 },
    { "epoch": 10.69, "learning_rate": 2.5e-05, "loss": 1.2594, "step": 396 },
    { "epoch": 10.72, "learning_rate": 2.492753623188406e-05, "loss": 1.2381, "step": 397 },
    { "epoch": 10.75, "learning_rate": 2.4855072463768118e-05, "loss": 1.2416, "step": 398 },
    { "epoch": 10.77, "learning_rate": 2.4782608695652175e-05, "loss": 1.2395, "step": 399 },
    { "epoch": 10.8, "learning_rate": 2.4710144927536232e-05, "loss": 1.2736, "step": 400 },
    { "epoch": 10.83, "learning_rate": 2.4637681159420292e-05, "loss": 1.2334, "step": 401 },
    { "epoch": 10.85, "learning_rate": 2.456521739130435e-05, "loss": 1.2722, "step": 402 },
    { "epoch": 10.88, "learning_rate": 2.449275362318841e-05, "loss": 1.2749, "step": 403 },
    { "epoch": 10.91, "learning_rate": 2.4420289855072466e-05, "loss": 1.3086, "step": 404 },
    { "epoch": 10.93, "learning_rate": 2.4347826086956523e-05, "loss": 1.3194, "step": 405 },
    { "epoch": 10.96, "learning_rate": 2.427536231884058e-05, "loss": 1.2443, "step": 406 },
    { "epoch": 10.99, "learning_rate": 2.420289855072464e-05, "loss": 1.2342, "step": 407 },
    { "epoch": 10.99, "eval_accuracy": 0.730909385375334, "eval_loss": 1.2519582509994507, "eval_runtime": 13.3427, "eval_samples_per_second": 37.174, "eval_steps_per_second": 4.647, "step": 407 },
    { "epoch": 11.03, "learning_rate": 2.4130434782608697e-05, "loss": 1.9108, "step": 408 },
    { "epoch": 11.05, "learning_rate": 2.4057971014492757e-05, "loss": 1.2788, "step": 409 },
    { "epoch": 11.08, "learning_rate": 2.398550724637681e-05, "loss": 1.2999, "step": 410 },
    { "epoch": 11.11, "learning_rate": 2.391304347826087e-05, "loss": 1.2402, "step": 411 },
    { "epoch": 11.13, "learning_rate": 2.3840579710144928e-05, "loss": 1.2587, "step": 412 },
    { "epoch": 11.16, "learning_rate": 2.3768115942028988e-05, "loss": 1.3018, "step": 413 },
    { "epoch": 11.19, "learning_rate": 2.3695652173913045e-05, "loss": 1.2506, "step": 414 },
    { "epoch": 11.21, "learning_rate": 2.36231884057971e-05, "loss": 1.2885, "step": 415 },
    { "epoch": 11.24, "learning_rate": 2.355072463768116e-05, "loss": 1.331, "step": 416 },
    { "epoch": 11.27, "learning_rate": 2.347826086956522e-05, "loss": 1.2617, "step": 417 },
    { "epoch": 11.29, "learning_rate": 2.3405797101449276e-05, "loss": 1.3315, "step": 418 },
    { "epoch": 11.32, "learning_rate": 2.3333333333333336e-05, "loss": 1.2644, "step": 419 },
    { "epoch": 11.35, "learning_rate": 2.3260869565217393e-05, "loss": 1.2276, "step": 420 },
    { "epoch": 11.37, "learning_rate": 2.318840579710145e-05, "loss": 1.2159, "step": 421 },
    { "epoch": 11.4, "learning_rate": 2.3115942028985506e-05, "loss": 1.2182, "step": 422 },
    { "epoch": 11.43, "learning_rate": 2.3043478260869567e-05, "loss": 1.2267, "step": 423 },
    { "epoch": 11.45, "learning_rate": 2.2971014492753623e-05, "loss": 1.3009, "step": 424 },
    { "epoch": 11.48, "learning_rate": 2.2898550724637684e-05, "loss": 1.2403, "step": 425 },
    { "epoch": 11.51, "learning_rate": 2.282608695652174e-05, "loss": 1.2692, "step": 426 },
    { "epoch": 11.53, "learning_rate": 2.2753623188405797e-05, "loss": 1.2098, "step": 427 },
    { "epoch": 11.56, "learning_rate": 2.2681159420289858e-05, "loss": 1.2321, "step": 428 },
    { "epoch": 11.59, "learning_rate": 2.2608695652173914e-05, "loss": 1.2206, "step": 429 },
    { "epoch": 11.61, "learning_rate": 2.2536231884057975e-05, "loss": 1.2684, "step": 430 },
    { "epoch": 11.64, "learning_rate": 2.246376811594203e-05, "loss": 1.1974, "step": 431 },
    { "epoch": 11.67, "learning_rate": 2.239130434782609e-05, "loss": 1.2054, "step": 432 },
    { "epoch": 11.69, "learning_rate": 2.2318840579710145e-05, "loss": 1.2492, "step": 433 },
    { "epoch": 11.72, "learning_rate": 2.2246376811594205e-05, "loss": 1.2817, "step": 434 },
    { "epoch": 11.75, "learning_rate": 2.2173913043478262e-05, "loss": 1.25, "step": 435 },
    { "epoch": 11.77, "learning_rate": 2.2101449275362323e-05, "loss": 1.2499, "step": 436 },
    { "epoch": 11.8, "learning_rate": 2.2028985507246376e-05, "loss": 1.2213, "step": 437 },
    { "epoch": 11.83, "learning_rate": 2.1956521739130436e-05, "loss": 1.2857, "step": 438 },
    { "epoch": 11.85, "learning_rate": 2.1884057971014493e-05, "loss": 1.2746, "step": 439 },
    { "epoch": 11.88, "learning_rate": 2.1811594202898553e-05, "loss": 1.2603, "step": 440 },
    { "epoch": 11.91, "learning_rate": 2.173913043478261e-05, "loss": 1.2651, "step": 441 },
    { "epoch": 11.93, "learning_rate": 2.1666666666666667e-05, "loss": 1.2772, "step": 442 },
    { "epoch": 11.96, "learning_rate": 2.1594202898550724e-05, "loss": 1.2843, "step": 443 },
    { "epoch": 11.99, "learning_rate": 2.1521739130434784e-05, "loss": 1.2199, "step": 444 },
    { "epoch": 11.99, "eval_accuracy": 0.7259977842029887, "eval_loss": 1.273816704750061, "eval_runtime": 13.2756, "eval_samples_per_second": 37.362, "eval_steps_per_second": 4.67, "step": 444 },
    { "epoch": 12.03, "learning_rate": 2.144927536231884e-05, "loss": 1.8625, "step": 445 },
    { "epoch": 12.05, "learning_rate": 2.13768115942029e-05, "loss": 1.2381, "step": 446 },
    { "epoch": 12.08, "learning_rate": 2.1304347826086958e-05, "loss": 1.2111, "step": 447 },
    { "epoch": 12.11, "learning_rate": 2.1231884057971015e-05, "loss": 1.2404, "step": 448 },
    { "epoch": 12.13, "learning_rate": 2.1159420289855072e-05, "loss": 1.2764, "step": 449 },
    { "epoch": 12.16, "learning_rate": 2.1086956521739132e-05, "loss": 1.2617, "step": 450 },
    { "epoch": 12.19, "learning_rate": 2.101449275362319e-05, "loss": 1.2685, "step": 451 },
    { "epoch": 12.21, "learning_rate": 2.094202898550725e-05, "loss": 1.1995, "step": 452 },
    { "epoch": 12.24, "learning_rate": 2.0869565217391303e-05, "loss": 1.2554, "step": 453 },
    { "epoch": 12.27, "learning_rate": 2.0797101449275363e-05, "loss": 1.2545, "step": 454 },
    { "epoch": 12.29, "learning_rate": 2.072463768115942e-05, "loss": 1.2315, "step": 455 },
    { "epoch": 12.32, "learning_rate": 2.065217391304348e-05, "loss": 1.2373, "step": 456 },
    { "epoch": 12.35, "learning_rate": 2.0579710144927537e-05, "loss": 1.2745, "step": 457 },
    { "epoch": 12.37, "learning_rate": 2.0507246376811594e-05, "loss": 1.2405, "step": 458 },
    { "epoch": 12.4, "learning_rate": 2.0434782608695654e-05, "loss": 1.2467, "step": 459 },
    { "epoch": 12.43, "learning_rate": 2.036231884057971e-05, "loss": 1.213, "step": 460 },
    { "epoch": 12.45, "learning_rate": 2.028985507246377e-05, "loss": 1.2499, "step": 461 },
    { "epoch": 12.48, "learning_rate": 2.0217391304347828e-05, "loss": 1.2601, "step": 462 },
    { "epoch": 12.51, "learning_rate": 2.0144927536231885e-05, "loss": 1.2704, "step": 463 },
    { "epoch": 12.53, "learning_rate": 2.007246376811594e-05, "loss": 1.2739, "step": 464 },
    { "epoch": 12.56, "learning_rate": 2e-05, "loss": 1.1849, "step": 465 },
    { "epoch": 12.59, "learning_rate": 1.992753623188406e-05, "loss": 1.1746, "step": 466 },
    { "epoch": 12.61, "learning_rate": 1.985507246376812e-05, "loss": 1.2887, "step": 467 },
    { "epoch": 12.64, "learning_rate": 1.9782608695652176e-05, "loss": 1.2067, "step": 468 },
    { "epoch": 12.67, "learning_rate": 1.9710144927536232e-05, "loss": 1.2601, "step": 469 },
    { "epoch": 12.69, "learning_rate": 1.963768115942029e-05, "loss": 1.2632, "step": 470 },
    { "epoch": 12.72, "learning_rate": 1.956521739130435e-05, "loss": 1.2633, "step": 471 },
    { "epoch": 12.75, "learning_rate": 1.9492753623188406e-05, "loss": 1.216, "step": 472 },
    { "epoch": 12.77, "learning_rate": 1.9420289855072467e-05, "loss": 1.2608, "step": 473 },
    { "epoch": 12.8, "learning_rate": 1.9347826086956523e-05, "loss": 1.2219, "step": 474 },
    { "epoch": 12.83, "learning_rate": 1.927536231884058e-05, "loss": 1.2864, "step": 475 },
    { "epoch": 12.85, "learning_rate": 1.9202898550724637e-05, "loss": 1.3147, "step": 476 },
    { "epoch": 12.88, "learning_rate": 1.9130434782608697e-05, "loss": 1.2553, "step": 477 },
    { "epoch": 12.91, "learning_rate": 1.9057971014492754e-05, "loss": 1.2906, "step": 478 },
    { "epoch": 12.93, "learning_rate": 1.8985507246376814e-05, "loss": 1.2831, "step": 479 },
    { "epoch": 12.96, "learning_rate": 1.8913043478260868e-05, "loss": 1.2721, "step": 480 },
    { "epoch": 12.99, "learning_rate": 1.8840579710144928e-05, "loss": 1.206, "step": 481 },
    { "epoch": 12.99, "eval_accuracy": 0.7335030880918842, "eval_loss": 1.2285895347595215, "eval_runtime": 13.2581, "eval_samples_per_second": 37.411, "eval_steps_per_second": 4.676, "step": 481 },
{ "epoch": 13.03, "learning_rate": 1.8768115942028985e-05, "loss": 1.8759, "step": 482 },
{ "epoch": 13.05, "learning_rate": 1.8695652173913045e-05, "loss": 1.2655, "step": 483 },
{ "epoch": 13.08, "learning_rate": 1.8623188405797102e-05, "loss": 1.218, "step": 484 },
{ "epoch": 13.11, "learning_rate": 1.855072463768116e-05, "loss": 1.2632, "step": 485 },
{ "epoch": 13.13, "learning_rate": 1.8478260869565216e-05, "loss": 1.2984, "step": 486 },
{ "epoch": 13.16, "learning_rate": 1.8405797101449276e-05, "loss": 1.2791, "step": 487 },
{ "epoch": 13.19, "learning_rate": 1.8333333333333333e-05, "loss": 1.2126, "step": 488 },
{ "epoch": 13.21, "learning_rate": 1.8260869565217393e-05, "loss": 1.2503, "step": 489 },
{ "epoch": 13.24, "learning_rate": 1.818840579710145e-05, "loss": 1.2168, "step": 490 },
{ "epoch": 13.27, "learning_rate": 1.8115942028985507e-05, "loss": 1.3218, "step": 491 },
{ "epoch": 13.29, "learning_rate": 1.8043478260869567e-05, "loss": 1.2605, "step": 492 },
{ "epoch": 13.32, "learning_rate": 1.7971014492753624e-05, "loss": 1.2497, "step": 493 },
{ "epoch": 13.35, "learning_rate": 1.7898550724637684e-05, "loss": 1.2989, "step": 494 },
{ "epoch": 13.37, "learning_rate": 1.782608695652174e-05, "loss": 1.2328, "step": 495 },
{ "epoch": 13.4, "learning_rate": 1.7753623188405798e-05, "loss": 1.2262, "step": 496 },
{ "epoch": 13.43, "learning_rate": 1.7681159420289855e-05, "loss": 1.2803, "step": 497 },
{ "epoch": 13.45, "learning_rate": 1.7608695652173915e-05, "loss": 1.273, "step": 498 },
{ "epoch": 13.48, "learning_rate": 1.7536231884057972e-05, "loss": 1.2922, "step": 499 },
{ "epoch": 13.51, "learning_rate": 1.7463768115942032e-05, "loss": 1.1986, "step": 500 },
{ "epoch": 13.53, "learning_rate": 1.739130434782609e-05, "loss": 1.267, "step": 501 },
{ "epoch": 13.56, "learning_rate": 1.7318840579710146e-05, "loss": 1.2887, "step": 502 },
{ "epoch": 13.59, "learning_rate": 1.7246376811594203e-05, "loss": 1.1824, "step": 503 },
{ "epoch": 13.61, "learning_rate": 1.7173913043478263e-05, "loss": 1.2688, "step": 504 },
{ "epoch": 13.64, "learning_rate": 1.710144927536232e-05, "loss": 1.2429, "step": 505 },
{ "epoch": 13.67, "learning_rate": 1.702898550724638e-05, "loss": 1.2028, "step": 506 },
{ "epoch": 13.69, "learning_rate": 1.6956521739130433e-05, "loss": 1.2543, "step": 507 },
{ "epoch": 13.72, "learning_rate": 1.6884057971014494e-05, "loss": 1.2463, "step": 508 },
{ "epoch": 13.75, "learning_rate": 1.681159420289855e-05, "loss": 1.2003, "step": 509 },
{ "epoch": 13.77, "learning_rate": 1.673913043478261e-05, "loss": 1.2947, "step": 510 },
{ "epoch": 13.8, "learning_rate": 1.6666666666666667e-05, "loss": 1.2499, "step": 511 },
{ "epoch": 13.83, "learning_rate": 1.6594202898550724e-05, "loss": 1.2518, "step": 512 },
{ "epoch": 13.85, "learning_rate": 1.652173913043478e-05, "loss": 1.2409, "step": 513 },
{ "epoch": 13.88, "learning_rate": 1.644927536231884e-05, "loss": 1.295, "step": 514 },
{ "epoch": 13.91, "learning_rate": 1.6376811594202898e-05, "loss": 1.2678, "step": 515 },
{ "epoch": 13.93, "learning_rate": 1.630434782608696e-05, "loss": 1.217, "step": 516 },
{ "epoch": 13.96, "learning_rate": 1.6231884057971015e-05, "loss": 1.2197, "step": 517 },
{ "epoch": 13.99, "learning_rate": 1.6159420289855072e-05, "loss": 1.221, "step": 518 },
{ "epoch": 13.99, "eval_accuracy": 0.7327190178115681, "eval_loss": 1.2421268224716187, "eval_runtime": 13.279, "eval_samples_per_second": 37.352, "eval_steps_per_second": 4.669, "step": 518 },
{ "epoch": 14.03, "learning_rate": 1.608695652173913e-05, "loss": 1.834, "step": 519 },
{ "epoch": 14.05, "learning_rate": 1.601449275362319e-05, "loss": 1.2727, "step": 520 },
{ "epoch": 14.08, "learning_rate": 1.5942028985507246e-05, "loss": 1.2753, "step": 521 },
{ "epoch": 14.11, "learning_rate": 1.5869565217391306e-05, "loss": 1.2137, "step": 522 },
{ "epoch": 14.13, "learning_rate": 1.5797101449275363e-05, "loss": 1.2626, "step": 523 },
{ "epoch": 14.16, "learning_rate": 1.572463768115942e-05, "loss": 1.2518, "step": 524 },
{ "epoch": 14.19, "learning_rate": 1.565217391304348e-05, "loss": 1.2167, "step": 525 },
{ "epoch": 14.21, "learning_rate": 1.5579710144927537e-05, "loss": 1.1935, "step": 526 },
{ "epoch": 14.24, "learning_rate": 1.5507246376811597e-05, "loss": 1.2374, "step": 527 },
{ "epoch": 14.27, "learning_rate": 1.5434782608695654e-05, "loss": 1.2236, "step": 528 },
{ "epoch": 14.29, "learning_rate": 1.536231884057971e-05, "loss": 1.2117, "step": 529 },
{ "epoch": 14.32, "learning_rate": 1.5289855072463768e-05, "loss": 1.3012, "step": 530 },
{ "epoch": 14.35, "learning_rate": 1.5217391304347828e-05, "loss": 1.3001, "step": 531 },
{ "epoch": 14.37, "learning_rate": 1.5144927536231885e-05, "loss": 1.198, "step": 532 },
{ "epoch": 14.4, "learning_rate": 1.5072463768115944e-05, "loss": 1.2198, "step": 533 },
{ "epoch": 14.43, "learning_rate": 1.5e-05, "loss": 1.2423, "step": 534 },
{ "epoch": 14.45, "learning_rate": 1.4927536231884059e-05, "loss": 1.2475, "step": 535 },
{ "epoch": 14.48, "learning_rate": 1.4855072463768116e-05, "loss": 1.2687, "step": 536 },
{ "epoch": 14.51, "learning_rate": 1.4782608695652176e-05, "loss": 1.2282, "step": 537 },
{ "epoch": 14.53, "learning_rate": 1.4710144927536231e-05, "loss": 1.2676, "step": 538 },
{ "epoch": 14.56, "learning_rate": 1.4637681159420291e-05, "loss": 1.3138, "step": 539 },
{ "epoch": 14.59, "learning_rate": 1.4565217391304348e-05, "loss": 1.2945, "step": 540 },
{ "epoch": 14.61, "learning_rate": 1.4492753623188407e-05, "loss": 1.2479, "step": 541 },
{ "epoch": 14.64, "learning_rate": 1.4420289855072464e-05, "loss": 1.2788, "step": 542 },
{ "epoch": 14.67, "learning_rate": 1.4347826086956522e-05, "loss": 1.2951, "step": 543 },
{ "epoch": 14.69, "learning_rate": 1.4275362318840579e-05, "loss": 1.2326, "step": 544 },
{ "epoch": 14.72, "learning_rate": 1.420289855072464e-05, "loss": 1.2256, "step": 545 },
{ "epoch": 14.75, "learning_rate": 1.4130434782608694e-05, "loss": 1.213, "step": 546 },
{ "epoch": 14.77, "learning_rate": 1.4057971014492755e-05, "loss": 1.2928, "step": 547 },
{ "epoch": 14.8, "learning_rate": 1.3985507246376811e-05, "loss": 1.2372, "step": 548 },
{ "epoch": 14.83, "learning_rate": 1.391304347826087e-05, "loss": 1.2561, "step": 549 },
{ "epoch": 14.85, "learning_rate": 1.3840579710144927e-05, "loss": 1.23, "step": 550 },
{ "epoch": 14.88, "learning_rate": 1.3768115942028985e-05, "loss": 1.2413, "step": 551 },
{ "epoch": 14.91, "learning_rate": 1.3695652173913042e-05, "loss": 1.2263, "step": 552 },
{ "epoch": 14.93, "learning_rate": 1.3623188405797103e-05, "loss": 1.2865, "step": 553 },
{ "epoch": 14.96, "learning_rate": 1.3550724637681161e-05, "loss": 1.2619, "step": 554 },
{ "epoch": 14.99, "learning_rate": 1.3478260869565218e-05, "loss": 1.2062, "step": 555 },
{ "epoch": 14.99, "eval_accuracy": 0.732803299595691, "eval_loss": 1.2402293682098389, "eval_runtime": 13.2984, "eval_samples_per_second": 37.298, "eval_steps_per_second": 4.662, "step": 555 },
{ "epoch": 15.03, "learning_rate": 1.3405797101449276e-05, "loss": 1.8577, "step": 556 },
{ "epoch": 15.05, "learning_rate": 1.3333333333333333e-05, "loss": 1.2946, "step": 557 },
{ "epoch": 15.08, "learning_rate": 1.3260869565217394e-05, "loss": 1.2822, "step": 558 },
{ "epoch": 15.11, "learning_rate": 1.318840579710145e-05, "loss": 1.2716, "step": 559 },
{ "epoch": 15.13, "learning_rate": 1.3115942028985509e-05, "loss": 1.2188, "step": 560 },
{ "epoch": 15.16, "learning_rate": 1.3043478260869566e-05, "loss": 1.2308, "step": 561 },
{ "epoch": 15.19, "learning_rate": 1.2971014492753624e-05, "loss": 1.2465, "step": 562 },
{ "epoch": 15.21, "learning_rate": 1.2898550724637681e-05, "loss": 1.2039, "step": 563 },
{ "epoch": 15.24, "learning_rate": 1.2826086956521741e-05, "loss": 1.2685, "step": 564 },
{ "epoch": 15.27, "learning_rate": 1.2753623188405797e-05, "loss": 1.224, "step": 565 },
{ "epoch": 15.29, "learning_rate": 1.2681159420289857e-05, "loss": 1.2462, "step": 566 },
{ "epoch": 15.32, "learning_rate": 1.2608695652173914e-05, "loss": 1.2162, "step": 567 },
{ "epoch": 15.35, "learning_rate": 1.2536231884057972e-05, "loss": 1.1778, "step": 568 },
{ "epoch": 15.37, "learning_rate": 1.246376811594203e-05, "loss": 1.2065, "step": 569 },
{ "epoch": 15.4, "learning_rate": 1.2391304347826088e-05, "loss": 1.2536, "step": 570 },
{ "epoch": 15.43, "learning_rate": 1.2318840579710146e-05, "loss": 1.2198, "step": 571 },
{ "epoch": 15.45, "learning_rate": 1.2246376811594205e-05, "loss": 1.242, "step": 572 },
{ "epoch": 15.48, "learning_rate": 1.2173913043478261e-05, "loss": 1.2376, "step": 573 },
{ "epoch": 15.51, "learning_rate": 1.210144927536232e-05, "loss": 1.2419, "step": 574 },
{ "epoch": 15.53, "learning_rate": 1.2028985507246379e-05, "loss": 1.2673, "step": 575 },
{ "epoch": 15.56, "learning_rate": 1.1956521739130435e-05, "loss": 1.2265, "step": 576 },
{ "epoch": 15.59, "learning_rate": 1.1884057971014494e-05, "loss": 1.2708, "step": 577 },
{ "epoch": 15.61, "learning_rate": 1.181159420289855e-05, "loss": 1.2264, "step": 578 },
{ "epoch": 15.64, "learning_rate": 1.173913043478261e-05, "loss": 1.2257, "step": 579 },
{ "epoch": 15.67, "learning_rate": 1.1666666666666668e-05, "loss": 1.1863, "step": 580 },
{ "epoch": 15.69, "learning_rate": 1.1594202898550725e-05, "loss": 1.1775, "step": 581 },
{ "epoch": 15.72, "learning_rate": 1.1521739130434783e-05, "loss": 1.2742, "step": 582 },
{ "epoch": 15.75, "learning_rate": 1.1449275362318842e-05, "loss": 1.2685, "step": 583 },
{ "epoch": 15.77, "learning_rate": 1.1376811594202899e-05, "loss": 1.2752, "step": 584 },
{ "epoch": 15.8, "learning_rate": 1.1304347826086957e-05, "loss": 1.2163, "step": 585 },
{ "epoch": 15.83, "learning_rate": 1.1231884057971016e-05, "loss": 1.279, "step": 586 },
{ "epoch": 15.85, "learning_rate": 1.1159420289855073e-05, "loss": 1.2633, "step": 587 },
{ "epoch": 15.88, "learning_rate": 1.1086956521739131e-05, "loss": 1.2338, "step": 588 },
{ "epoch": 15.91, "learning_rate": 1.1014492753623188e-05, "loss": 1.2283, "step": 589 },
{ "epoch": 15.93, "learning_rate": 1.0942028985507247e-05, "loss": 1.2701, "step": 590 },
{ "epoch": 15.96, "learning_rate": 1.0869565217391305e-05, "loss": 1.2949, "step": 591 },
{ "epoch": 15.99, "learning_rate": 1.0797101449275362e-05, "loss": 1.2305, "step": 592 },
{ "epoch": 15.99, "eval_accuracy": 0.7307723434675875, "eval_loss": 1.247312307357788, "eval_runtime": 13.2755, "eval_samples_per_second": 37.362, "eval_steps_per_second": 4.67, "step": 592 },
{ "epoch": 16.03, "learning_rate": 1.072463768115942e-05, "loss": 1.8458, "step": 593 },
{ "epoch": 16.05, "learning_rate": 1.0652173913043479e-05, "loss": 1.1802, "step": 594 },
{ "epoch": 16.08, "learning_rate": 1.0579710144927536e-05, "loss": 1.2814, "step": 595 },
{ "epoch": 16.11, "learning_rate": 1.0507246376811594e-05, "loss": 1.2367, "step": 596 },
{ "epoch": 16.13, "learning_rate": 1.0434782608695651e-05, "loss": 1.2374, "step": 597 },
{ "epoch": 16.16, "learning_rate": 1.036231884057971e-05, "loss": 1.3441, "step": 598 },
{ "epoch": 16.19, "learning_rate": 1.0289855072463768e-05, "loss": 1.2469, "step": 599 },
{ "epoch": 16.21, "learning_rate": 1.0217391304347827e-05, "loss": 1.2178, "step": 600 },
{ "epoch": 16.24, "learning_rate": 1.0144927536231885e-05, "loss": 1.2353, "step": 601 },
{ "epoch": 16.27, "learning_rate": 1.0072463768115942e-05, "loss": 1.2735, "step": 602 },
{ "epoch": 16.29, "learning_rate": 1e-05, "loss": 1.2322, "step": 603 },
{ "epoch": 16.32, "learning_rate": 9.92753623188406e-06, "loss": 1.2349, "step": 604 },
{ "epoch": 16.35, "learning_rate": 9.855072463768116e-06, "loss": 1.2346, "step": 605 },
{ "epoch": 16.37, "learning_rate": 9.782608695652175e-06, "loss": 1.2412, "step": 606 },
{ "epoch": 16.4, "learning_rate": 9.710144927536233e-06, "loss": 1.2781, "step": 607 },
{ "epoch": 16.43, "learning_rate": 9.63768115942029e-06, "loss": 1.2516, "step": 608 },
{ "epoch": 16.45, "learning_rate": 9.565217391304349e-06, "loss": 1.1951, "step": 609 },
{ "epoch": 16.48, "learning_rate": 9.492753623188407e-06, "loss": 1.2746, "step": 610 },
{ "epoch": 16.51, "learning_rate": 9.420289855072464e-06, "loss": 1.2231, "step": 611 },
{ "epoch": 16.53, "learning_rate": 9.347826086956523e-06, "loss": 1.2761, "step": 612 },
{ "epoch": 16.56, "learning_rate": 9.27536231884058e-06, "loss": 1.1835, "step": 613 },
{ "epoch": 16.59, "learning_rate": 9.202898550724638e-06, "loss": 1.2439, "step": 614 },
{ "epoch": 16.61, "learning_rate": 9.130434782608697e-06, "loss": 1.2909, "step": 615 },
{ "epoch": 16.64, "learning_rate": 9.057971014492753e-06, "loss": 1.2473, "step": 616 },
{ "epoch": 16.67, "learning_rate": 8.985507246376812e-06, "loss": 1.2172, "step": 617 },
{ "epoch": 16.69, "learning_rate": 8.91304347826087e-06, "loss": 1.2669, "step": 618 },
{ "epoch": 16.72, "learning_rate": 8.840579710144927e-06, "loss": 1.2826, "step": 619 },
{ "epoch": 16.75, "learning_rate": 8.768115942028986e-06, "loss": 1.1676, "step": 620 },
{ "epoch": 16.77, "learning_rate": 8.695652173913044e-06, "loss": 1.2068, "step": 621 },
{ "epoch": 16.8, "learning_rate": 8.623188405797101e-06, "loss": 1.2571, "step": 622 },
{ "epoch": 16.83, "learning_rate": 8.55072463768116e-06, "loss": 1.2138, "step": 623 },
{ "epoch": 16.85, "learning_rate": 8.478260869565217e-06, "loss": 1.1783, "step": 624 },
{ "epoch": 16.88, "learning_rate": 8.405797101449275e-06, "loss": 1.2801, "step": 625 },
{ "epoch": 16.91, "learning_rate": 8.333333333333334e-06, "loss": 1.2122, "step": 626 },
{ "epoch": 16.93, "learning_rate": 8.26086956521739e-06, "loss": 1.2112, "step": 627 },
{ "epoch": 16.96, "learning_rate": 8.188405797101449e-06, "loss": 1.2542, "step": 628 },
{ "epoch": 16.99, "learning_rate": 8.115942028985508e-06, "loss": 1.2426, "step": 629 },
{ "epoch": 16.99, "eval_accuracy": 0.7318157181571816, "eval_loss": 1.2249630689620972, "eval_runtime": 13.2585, "eval_samples_per_second": 37.41, "eval_steps_per_second": 4.676, "step": 629 },
{ "epoch": 17.03, "learning_rate": 8.043478260869565e-06, "loss": 1.8211, "step": 630 },
{ "epoch": 17.05, "learning_rate": 7.971014492753623e-06, "loss": 1.2233, "step": 631 },
{ "epoch": 17.08, "learning_rate": 7.898550724637682e-06, "loss": 1.246, "step": 632 },
{ "epoch": 17.11, "learning_rate": 7.82608695652174e-06, "loss": 1.2303, "step": 633 },
{ "epoch": 17.13, "learning_rate": 7.753623188405799e-06, "loss": 1.2127, "step": 634 },
{ "epoch": 17.16, "learning_rate": 7.681159420289856e-06, "loss": 1.1923, "step": 635 },
{ "epoch": 17.19, "learning_rate": 7.608695652173914e-06, "loss": 1.2247, "step": 636 },
{ "epoch": 17.21, "learning_rate": 7.536231884057972e-06, "loss": 1.2365, "step": 637 },
{ "epoch": 17.24, "learning_rate": 7.4637681159420295e-06, "loss": 1.1985, "step": 638 },
{ "epoch": 17.27, "learning_rate": 7.391304347826088e-06, "loss": 1.2298, "step": 639 },
{ "epoch": 17.29, "learning_rate": 7.318840579710146e-06, "loss": 1.1936, "step": 640 },
{ "epoch": 17.32, "learning_rate": 7.246376811594203e-06, "loss": 1.2108, "step": 641 },
{ "epoch": 17.35, "learning_rate": 7.173913043478261e-06, "loss": 1.3043, "step": 642 },
{ "epoch": 17.37, "learning_rate": 7.10144927536232e-06, "loss": 1.2297, "step": 643 },
{ "epoch": 17.4, "learning_rate": 7.028985507246377e-06, "loss": 1.2587, "step": 644 },
{ "epoch": 17.43, "learning_rate": 6.956521739130435e-06, "loss": 1.2347, "step": 645 },
{ "epoch": 17.45, "learning_rate": 6.884057971014493e-06, "loss": 1.2355, "step": 646 },
{ "epoch": 17.48, "learning_rate": 6.811594202898551e-06, "loss": 1.2061, "step": 647 },
{ "epoch": 17.51, "learning_rate": 6.739130434782609e-06, "loss": 1.2036, "step": 648 },
{ "epoch": 17.53, "learning_rate": 6.666666666666667e-06, "loss": 1.2495, "step": 649 },
{ "epoch": 17.56, "learning_rate": 6.594202898550725e-06, "loss": 1.216, "step": 650 },
{ "epoch": 17.59, "learning_rate": 6.521739130434783e-06, "loss": 1.223, "step": 651 },
{ "epoch": 17.61, "learning_rate": 6.449275362318841e-06, "loss": 1.2645, "step": 652 },
{ "epoch": 17.64, "learning_rate": 6.376811594202898e-06, "loss": 1.1786, "step": 653 },
{ "epoch": 17.67, "learning_rate": 6.304347826086957e-06, "loss": 1.2447, "step": 654 },
{ "epoch": 17.69, "learning_rate": 6.231884057971015e-06, "loss": 1.2604, "step": 655 },
{ "epoch": 17.72, "learning_rate": 6.159420289855073e-06, "loss": 1.2098, "step": 656 },
{ "epoch": 17.75, "learning_rate": 6.086956521739131e-06, "loss": 1.2294, "step": 657 },
{ "epoch": 17.77, "learning_rate": 6.014492753623189e-06, "loss": 1.2241, "step": 658 },
{ "epoch": 17.8, "learning_rate": 5.942028985507247e-06, "loss": 1.2454, "step": 659 },
{ "epoch": 17.83, "learning_rate": 5.869565217391305e-06, "loss": 1.2278, "step": 660 },
{ "epoch": 17.85, "learning_rate": 5.797101449275362e-06, "loss": 1.2871, "step": 661 },
{ "epoch": 17.88, "learning_rate": 5.724637681159421e-06, "loss": 1.2772, "step": 662 },
{ "epoch": 17.91, "learning_rate": 5.652173913043479e-06, "loss": 1.2415, "step": 663 },
{ "epoch": 17.93, "learning_rate": 5.579710144927536e-06, "loss": 1.1829, "step": 664 },
{ "epoch": 17.96, "learning_rate": 5.507246376811594e-06, "loss": 1.228, "step": 665 },
{ "epoch": 17.99, "learning_rate": 5.4347826086956525e-06, "loss": 1.2096, "step": 666 },
{ "epoch": 17.99, "eval_accuracy": 0.7352733398543636, "eval_loss": 1.2186108827590942, "eval_runtime": 13.2382, "eval_samples_per_second": 37.467, "eval_steps_per_second": 4.683, "step": 666 },
{ "epoch": 18.03, "learning_rate": 5.36231884057971e-06, "loss": 1.812, "step": 667 },
{ "epoch": 18.05, "learning_rate": 5.289855072463768e-06, "loss": 1.1976, "step": 668 },
{ "epoch": 18.08, "learning_rate": 5.217391304347826e-06, "loss": 1.2782, "step": 669 },
{ "epoch": 18.11, "learning_rate": 5.144927536231884e-06, "loss": 1.2505, "step": 670 },
{ "epoch": 18.13, "learning_rate": 5.072463768115943e-06, "loss": 1.2352, "step": 671 },
{ "epoch": 18.16, "learning_rate": 5e-06, "loss": 1.2544, "step": 672 },
{ "epoch": 18.19, "learning_rate": 4.927536231884058e-06, "loss": 1.1739, "step": 673 },
{ "epoch": 18.21, "learning_rate": 4.855072463768117e-06, "loss": 1.2626, "step": 674 },
{ "epoch": 18.24, "learning_rate": 4.782608695652174e-06, "loss": 1.2034, "step": 675 },
{ "epoch": 18.27, "learning_rate": 4.710144927536232e-06, "loss": 1.2818, "step": 676 },
{ "epoch": 18.29, "learning_rate": 4.63768115942029e-06, "loss": 1.2388, "step": 677 },
{ "epoch": 18.32, "learning_rate": 4.565217391304348e-06, "loss": 1.239, "step": 678 },
{ "epoch": 18.35, "learning_rate": 4.492753623188406e-06, "loss": 1.1827, "step": 679 },
{ "epoch": 18.37, "learning_rate": 4.420289855072464e-06, "loss": 1.1712, "step": 680 },
{ "epoch": 18.4, "learning_rate": 4.347826086956522e-06, "loss": 1.2147, "step": 681 },
{ "epoch": 18.43, "learning_rate": 4.27536231884058e-06, "loss": 1.2106, "step": 682 },
{ "epoch": 18.45, "learning_rate": 4.202898550724638e-06, "loss": 1.2296, "step": 683 },
{ "epoch": 18.48, "learning_rate": 4.130434782608695e-06, "loss": 1.2551, "step": 684 },
{ "epoch": 18.51, "learning_rate": 4.057971014492754e-06, "loss": 1.235, "step": 685 },
{ "epoch": 18.53, "learning_rate": 3.9855072463768115e-06, "loss": 1.2341, "step": 686 },
{ "epoch": 18.56, "learning_rate": 3.91304347826087e-06, "loss": 1.189, "step": 687 },
{ "epoch": 18.59, "learning_rate": 3.840579710144928e-06, "loss": 1.2241, "step": 688 },
{ "epoch": 18.61, "learning_rate": 3.768115942028986e-06, "loss": 1.2627, "step": 689 },
{ "epoch": 18.64, "learning_rate": 3.695652173913044e-06, "loss": 1.2259, "step": 690 },
{ "epoch": 18.67, "learning_rate": 3.6231884057971017e-06, "loss": 1.2247, "step": 691 },
{ "epoch": 18.69, "learning_rate": 3.55072463768116e-06, "loss": 1.2493, "step": 692 },
{ "epoch": 18.72, "learning_rate": 3.4782608695652175e-06, "loss": 1.1931, "step": 693 },
{ "epoch": 18.75, "learning_rate": 3.4057971014492756e-06, "loss": 1.2441, "step": 694 },
{ "epoch": 18.77, "learning_rate": 3.3333333333333333e-06, "loss": 1.1884, "step": 695 },
{ "epoch": 18.8, "learning_rate": 3.2608695652173914e-06, "loss": 1.1858, "step": 696 },
{ "epoch": 18.83, "learning_rate": 3.188405797101449e-06, "loss": 1.2114, "step": 697 },
{ "epoch": 18.85, "learning_rate": 3.1159420289855077e-06, "loss": 1.2482, "step": 698 },
{ "epoch": 18.88, "learning_rate": 3.0434782608695654e-06, "loss": 1.2253, "step": 699 },
{ "epoch": 18.91, "learning_rate": 2.9710144927536235e-06, "loss": 1.2292, "step": 700 },
{ "epoch": 18.93, "learning_rate": 2.898550724637681e-06, "loss": 1.2396, "step": 701 },
{ "epoch": 18.96, "learning_rate": 2.8260869565217393e-06, "loss": 1.1881, "step": 702 },
{ "epoch": 18.99, "learning_rate": 2.753623188405797e-06, "loss": 1.1961, "step": 703 },
{ "epoch": 18.99, "eval_accuracy": 0.7360741986223355, "eval_loss": 1.2214456796646118, "eval_runtime": 13.2148, "eval_samples_per_second": 37.534, "eval_steps_per_second": 4.692, "step": 703 },
{ "epoch": 19.03, "learning_rate": 2.681159420289855e-06, "loss": 1.8177, "step": 704 },
{ "epoch": 19.05, "learning_rate": 2.608695652173913e-06, "loss": 1.2484, "step": 705 },
{ "epoch": 19.08, "learning_rate": 2.5362318840579714e-06, "loss": 1.2075, "step": 706 },
{ "epoch": 19.11, "learning_rate": 2.463768115942029e-06, "loss": 1.244, "step": 707 },
{ "epoch": 19.13, "learning_rate": 2.391304347826087e-06, "loss": 1.2501, "step": 708 },
{ "epoch": 19.16, "learning_rate": 2.318840579710145e-06, "loss": 1.2372, "step": 709 },
{ "epoch": 19.19, "learning_rate": 2.246376811594203e-06, "loss": 1.212, "step": 710 },
{ "epoch": 19.21, "learning_rate": 2.173913043478261e-06, "loss": 1.2428, "step": 711 },
{ "epoch": 19.24, "learning_rate": 2.101449275362319e-06, "loss": 1.2565, "step": 712 },
{ "epoch": 19.27, "learning_rate": 2.028985507246377e-06, "loss": 1.2166, "step": 713 },
{ "epoch": 19.29, "learning_rate": 1.956521739130435e-06, "loss": 1.267, "step": 714 },
{ "epoch": 19.32, "learning_rate": 1.884057971014493e-06, "loss": 1.1961, "step": 715 },
{ "epoch": 19.35, "learning_rate": 1.8115942028985508e-06, "loss": 1.2229, "step": 716 },
{ "epoch": 19.37, "learning_rate": 1.7391304347826088e-06, "loss": 1.246, "step": 717 },
{ "epoch": 19.4, "learning_rate": 1.6666666666666667e-06, "loss": 1.2767, "step": 718 },
{ "epoch": 19.43, "learning_rate": 1.5942028985507246e-06, "loss": 1.1875, "step": 719 },
{ "epoch": 19.45, "learning_rate": 1.5217391304347827e-06, "loss": 1.2265, "step": 720 },
{ "epoch": 19.48, "learning_rate": 1.4492753623188406e-06, "loss": 1.2228, "step": 721 },
{ "epoch": 19.51, "learning_rate": 1.3768115942028985e-06, "loss": 1.2094, "step": 722 },
{ "epoch": 19.53, "learning_rate": 1.3043478260869564e-06, "loss": 1.216, "step": 723 },
{ "epoch": 19.56, "learning_rate": 1.2318840579710145e-06, "loss": 1.249, "step": 724 },
{ "epoch": 19.59, "learning_rate": 1.1594202898550724e-06, "loss": 1.2173, "step": 725 },
{ "epoch": 19.61, "learning_rate": 1.0869565217391306e-06, "loss": 1.2106, "step": 726 },
{ "epoch": 19.64, "learning_rate": 1.0144927536231885e-06, "loss": 1.2393, "step": 727 },
{ "epoch": 19.67, "learning_rate": 9.420289855072465e-07, "loss": 1.2367, "step": 728 },
{ "epoch": 19.69, "learning_rate": 8.695652173913044e-07, "loss": 1.2037, "step": 729 },
{ "epoch": 19.72, "learning_rate": 7.971014492753623e-07, "loss": 1.2176, "step": 730 },
{ "epoch": 19.75, "learning_rate": 7.246376811594203e-07, "loss": 1.1534, "step": 731 },
{ "epoch": 19.77, "learning_rate": 6.521739130434782e-07, "loss": 1.2168, "step": 732 },
{ "epoch": 19.8, "learning_rate": 5.797101449275362e-07, "loss": 1.203, "step": 733 },
{ "epoch": 19.83, "learning_rate": 5.072463768115942e-07, "loss": 1.2523, "step": 734 },
{ "epoch": 19.85, "learning_rate": 4.347826086956522e-07, "loss": 1.2665, "step": 735 },
{ "epoch": 19.88, "learning_rate": 3.6231884057971015e-07, "loss": 1.2114, "step": 736 },
{ "epoch": 19.91, "learning_rate": 2.898550724637681e-07, "loss": 1.2425, "step": 737 },
{ "epoch": 19.93, "learning_rate": 2.173913043478261e-07, "loss": 1.3178, "step": 738 },
{ "epoch": 19.96, "learning_rate": 1.4492753623188405e-07, "loss": 1.2353, "step": 739 },
{ "epoch": 19.99, "learning_rate": 7.246376811594203e-08, "loss": 1.2136, "step": 740 },
{ "epoch": 19.99, "eval_accuracy": 0.7311184760057123, "eval_loss": 1.250640869140625, "eval_runtime": 13.2938, "eval_samples_per_second": 37.311, "eval_steps_per_second": 4.664, "step": 740 },
{ "epoch": 19.99, "step": 740, "total_flos": 2.524663139915981e+16, "train_loss": 1.3203882475157043, "train_runtime": 9311.1992, "train_samples_per_second": 10.306, "train_steps_per_second": 0.079 }
],
"max_steps": 740,
"num_train_epochs": 20,
"total_flos": 2.524663139915981e+16,
"trial_name": null,
"trial_params": null
}