{
  "best_metric": 0.13513699173927307,
  "best_model_checkpoint": "text-emotion-classifier-distilbert/checkpoint-1950",
  "epoch": 13.0,
  "eval_steps": 1,
  "global_step": 1950,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4e-08, "loss": 1.0825, "step": 1 },
    { "epoch": 0.01, "learning_rate": 8e-08, "loss": 1.1225, "step": 2 },
    { "epoch": 0.02, "learning_rate": 1.2e-07, "loss": 1.0905, "step": 3 },
    { "epoch": 0.03, "learning_rate": 1.6e-07, "loss": 1.1366, "step": 4 },
    { "epoch": 0.03, "learning_rate": 2e-07, "loss": 1.1014, "step": 5 },
    { "epoch": 0.04, "learning_rate": 2.4e-07, "loss": 1.0931, "step": 6 },
    { "epoch": 0.05, "learning_rate": 2.8e-07, "loss": 1.0652, "step": 7 },
    { "epoch": 0.05, "learning_rate": 3.2e-07, "loss": 1.1165, "step": 8 },
    { "epoch": 0.06, "learning_rate": 3.6e-07, "loss": 1.1065, "step": 9 },
    { "epoch": 0.07, "learning_rate": 4e-07, "loss": 1.1256, "step": 10 },
    { "epoch": 0.07, "learning_rate": 4.3999999999999997e-07, "loss": 1.1515, "step": 11 },
    { "epoch": 0.08, "learning_rate": 4.8e-07, "loss": 1.103, "step": 12 },
    { "epoch": 0.09, "learning_rate": 5.2e-07, "loss": 1.0769, "step": 13 },
    { "epoch": 0.09, "learning_rate": 5.6e-07, "loss": 1.101, "step": 14 },
    { "epoch": 0.1, "learning_rate": 6e-07, "loss": 1.1066, "step": 15 },
    { "epoch": 0.11, "learning_rate": 6.4e-07, "loss": 1.1365, "step": 16 },
    { "epoch": 0.11, "learning_rate": 6.800000000000001e-07, "loss": 1.063, "step": 17 },
    { "epoch": 0.12, "learning_rate": 7.2e-07, "loss": 1.1014, "step": 18 },
    { "epoch": 0.13, "learning_rate": 7.599999999999999e-07, "loss": 1.1079, "step": 19 },
    { "epoch": 0.13, "learning_rate": 8e-07, "loss": 1.0915, "step": 20 },
    { "epoch": 0.14, "learning_rate": 8.399999999999999e-07, "loss": 1.1342, "step": 21 },
    { "epoch": 0.15, "learning_rate": 8.799999999999999e-07, "loss": 1.1188, "step": 22 },
    { "epoch": 0.15, "learning_rate": 9.2e-07, "loss": 1.0889, "step": 23 },
    { "epoch": 0.16, "learning_rate": 9.6e-07, "loss": 1.0921, "step": 24 },
    { "epoch": 0.17, "learning_rate": 1e-06, "loss": 1.1061, "step": 25 },
    { "epoch": 0.17, "learning_rate": 1.04e-06, "loss": 1.1493, "step": 26 },
    { "epoch": 0.18, "learning_rate": 1.08e-06, "loss": 1.0383, "step": 27 },
    { "epoch": 0.19, "learning_rate": 1.12e-06, "loss": 1.1648, "step": 28 },
    { "epoch": 0.19, "learning_rate": 1.16e-06, "loss": 1.131, "step": 29 },
    { "epoch": 0.2, "learning_rate": 1.2e-06, "loss": 1.0875, "step": 30 },
    { "epoch": 0.21, "learning_rate": 1.24e-06, "loss": 1.1012, "step": 31 },
    { "epoch": 0.21, "learning_rate": 1.28e-06, "loss": 1.1321, "step": 32 },
    { "epoch": 0.22, "learning_rate": 1.32e-06, "loss": 1.0796, "step": 33 },
    { "epoch": 0.23, "learning_rate": 1.3600000000000001e-06, "loss": 1.0892, "step": 34 },
    { "epoch": 0.23, "learning_rate": 1.4e-06, "loss": 1.1048, "step": 35 },
    { "epoch": 0.24, "learning_rate": 1.44e-06, "loss": 1.0683, "step": 36 },
    { "epoch": 0.25, "learning_rate": 1.48e-06, "loss": 1.09, "step": 37 },
    { "epoch": 0.25, "learning_rate": 1.5199999999999998e-06, "loss": 1.1109, "step": 38 },
    { "epoch": 0.26, "learning_rate": 1.5599999999999999e-06, "loss": 1.1116, "step": 39 },
    { "epoch": 0.27, "learning_rate": 1.6e-06, "loss": 1.1195, "step": 40 },
    { "epoch": 0.27, "learning_rate": 1.6399999999999998e-06, "loss": 1.0677, "step": 41 },
    { "epoch": 0.28, "learning_rate": 1.6799999999999998e-06, "loss": 1.1448, "step": 42 },
    { "epoch": 0.29, "learning_rate": 1.7199999999999998e-06, "loss": 1.0935, "step": 43 },
    { "epoch": 0.29, "learning_rate": 1.7599999999999999e-06, "loss": 1.077, "step": 44 },
    { "epoch": 0.3, "learning_rate": 1.8e-06, "loss": 1.1167, "step": 45 },
    { "epoch": 0.31, "learning_rate": 1.84e-06, "loss": 1.0701, "step": 46 },
    { "epoch": 0.31, "learning_rate": 1.8799999999999998e-06, "loss": 1.1005, "step": 47 },
    { "epoch": 0.32, "learning_rate": 1.92e-06, "loss": 1.0671, "step": 48 },
    { "epoch": 0.33, "learning_rate": 1.96e-06, "loss": 1.0934, "step": 49 },
    { "epoch": 0.33, "learning_rate": 2e-06, "loss": 1.1116, "step": 50 },
    { "epoch": 0.34, "learning_rate": 1.999322033898305e-06, "loss": 1.1154, "step": 51 },
    { "epoch": 0.35, "learning_rate": 1.99864406779661e-06, "loss": 1.0956, "step": 52 },
    { "epoch": 0.35, "learning_rate": 1.997966101694915e-06, "loss": 1.0935, "step": 53 },
    { "epoch": 0.36, "learning_rate": 1.99728813559322e-06, "loss": 1.0894, "step": 54 },
    { "epoch": 0.37, "learning_rate": 1.996610169491525e-06, "loss": 1.0766, "step": 55 },
    { "epoch": 0.37, "learning_rate": 1.9959322033898307e-06, "loss": 1.1362, "step": 56 },
    { "epoch": 0.38, "learning_rate": 1.9952542372881357e-06, "loss": 1.0796, "step": 57 },
    { "epoch": 0.39, "learning_rate": 1.9945762711864408e-06, "loss": 1.1127, "step": 58 },
    { "epoch": 0.39, "learning_rate": 1.993898305084746e-06, "loss": 1.1046, "step": 59 },
    { "epoch": 0.4, "learning_rate": 1.993220338983051e-06, "loss": 1.1021, "step": 60 },
    { "epoch": 0.41, "learning_rate": 1.992542372881356e-06, "loss": 1.12, "step": 61 },
    { "epoch": 0.41, "learning_rate": 1.991864406779661e-06, "loss": 1.0773, "step": 62 },
    { "epoch": 0.42, "learning_rate": 1.991186440677966e-06, "loss": 1.0872, "step": 63 },
    { "epoch": 0.43, "learning_rate": 1.990508474576271e-06, "loss": 1.0911, "step": 64 },
    { "epoch": 0.43, "learning_rate": 1.989830508474576e-06, "loss": 1.1071, "step": 65 },
    { "epoch": 0.44, "learning_rate": 1.9891525423728812e-06, "loss": 1.1356, "step": 66 },
    { "epoch": 0.45, "learning_rate": 1.9884745762711863e-06, "loss": 1.1178, "step": 67 },
    { "epoch": 0.45, "learning_rate": 1.9877966101694913e-06, "loss": 1.1216, "step": 68 },
    { "epoch": 0.46, "learning_rate": 1.9871186440677964e-06, "loss": 1.1022, "step": 69 },
    { "epoch": 0.47, "learning_rate": 1.9864406779661014e-06, "loss": 1.0726, "step": 70 },
    { "epoch": 0.47, "learning_rate": 1.9857627118644065e-06, "loss": 1.1405, "step": 71 },
    { "epoch": 0.48, "learning_rate": 1.985084745762712e-06, "loss": 1.0563, "step": 72 },
    { "epoch": 0.49, "learning_rate": 1.984406779661017e-06, "loss": 1.1061, "step": 73 },
    { "epoch": 0.49, "learning_rate": 1.983728813559322e-06, "loss": 1.1059, "step": 74 },
    { "epoch": 0.5, "learning_rate": 1.983050847457627e-06, "loss": 1.1053, "step": 75 },
    { "epoch": 0.51, "learning_rate": 1.982372881355932e-06, "loss": 1.076, "step": 76 },
    { "epoch": 0.51, "learning_rate": 1.9816949152542373e-06, "loss": 1.0533, "step": 77 },
    { "epoch": 0.52, "learning_rate": 1.9810169491525423e-06, "loss": 1.0698, "step": 78 },
    { "epoch": 0.53, "learning_rate": 1.9803389830508474e-06, "loss": 1.0742, "step": 79 },
    { "epoch": 0.53, "learning_rate": 1.9796610169491524e-06, "loss": 1.0577, "step": 80 },
    { "epoch": 0.54, "learning_rate": 1.9789830508474575e-06, "loss": 1.1312, "step": 81 },
    { "epoch": 0.55, "learning_rate": 1.9783050847457625e-06, "loss": 1.0969, "step": 82 },
    { "epoch": 0.55, "learning_rate": 1.9776271186440676e-06, "loss": 1.108, "step": 83 },
    { "epoch": 0.56, "learning_rate": 1.9769491525423726e-06, "loss": 1.0715, "step": 84 },
    { "epoch": 0.57, "learning_rate": 1.9762711864406777e-06, "loss": 1.1095, "step": 85 },
    { "epoch": 0.57, "learning_rate": 1.9755932203389827e-06, "loss": 1.1169, "step": 86 },
    { "epoch": 0.58, "learning_rate": 1.974915254237288e-06, "loss": 1.1105, "step": 87 },
    { "epoch": 0.59, "learning_rate": 1.9742372881355933e-06, "loss": 1.1455, "step": 88 },
    { "epoch": 0.59, "learning_rate": 1.9735593220338983e-06, "loss": 1.1055, "step": 89 },
    { "epoch": 0.6, "learning_rate": 1.9728813559322034e-06, "loss": 1.1118, "step": 90 },
    { "epoch": 0.61, "learning_rate": 1.9722033898305084e-06, "loss": 1.0656, "step": 91 },
    { "epoch": 0.61, "learning_rate": 1.9715254237288135e-06, "loss": 1.1251, "step": 92 },
    { "epoch": 0.62, "learning_rate": 1.9708474576271186e-06, "loss": 1.0446, "step": 93 },
    { "epoch": 0.63, "learning_rate": 1.9701694915254236e-06, "loss": 1.0961, "step": 94 },
    { "epoch": 0.63, "learning_rate": 1.9694915254237287e-06, "loss": 1.1304, "step": 95 },
    { "epoch": 0.64, "learning_rate": 1.9688135593220337e-06, "loss": 1.0913, "step": 96 },
    { "epoch": 0.65, "learning_rate": 1.9681355932203388e-06, "loss": 1.0938, "step": 97 },
    { "epoch": 0.65, "learning_rate": 1.967457627118644e-06, "loss": 1.0958, "step": 98 },
    { "epoch": 0.66, "learning_rate": 1.9667796610169493e-06, "loss": 1.0847, "step": 99 },
    { "epoch": 0.67, "learning_rate": 1.9661016949152544e-06, "loss": 1.0685, "step": 100 },
    { "epoch": 0.67, "learning_rate": 1.9654237288135594e-06, "loss": 1.0825, "step": 101 },
    { "epoch": 0.68, "learning_rate": 1.964745762711864e-06, "loss": 1.0761, "step": 102 },
    { "epoch": 0.69, "learning_rate": 1.964067796610169e-06, "loss": 1.0941, "step": 103 },
    { "epoch": 0.69, "learning_rate": 1.9633898305084746e-06, "loss": 1.1172, "step": 104 },
    { "epoch": 0.7, "learning_rate": 1.9627118644067796e-06, "loss": 1.0578, "step": 105 },
    { "epoch": 0.71, "learning_rate": 1.9620338983050847e-06, "loss": 1.1347, "step": 106 },
    { "epoch": 0.71, "learning_rate": 1.9613559322033898e-06, "loss": 1.0605, "step": 107 },
    { "epoch": 0.72, "learning_rate": 1.960677966101695e-06, "loss": 1.0932, "step": 108 },
    { "epoch": 0.73, "learning_rate": 1.96e-06, "loss": 1.071, "step": 109 },
    { "epoch": 0.73, "learning_rate": 1.959322033898305e-06, "loss": 1.0858, "step": 110 },
    { "epoch": 0.74, "learning_rate": 1.95864406779661e-06, "loss": 1.0561, "step": 111 },
    { "epoch": 0.75, "learning_rate": 1.957966101694915e-06, "loss": 1.0732, "step": 112 },
    { "epoch": 0.75, "learning_rate": 1.95728813559322e-06, "loss": 1.0731, "step": 113 },
    { "epoch": 0.76, "learning_rate": 1.956610169491525e-06, "loss": 1.085, "step": 114 },
    { "epoch": 0.77, "learning_rate": 1.9559322033898306e-06, "loss": 1.0778, "step": 115 },
    { "epoch": 0.77, "learning_rate": 1.9552542372881357e-06, "loss": 1.0489, "step": 116 },
    { "epoch": 0.78, "learning_rate": 1.9545762711864407e-06, "loss": 1.0719, "step": 117 },
    { "epoch": 0.79, "learning_rate": 1.953898305084746e-06, "loss": 1.0752, "step": 118 },
    { "epoch": 0.79, "learning_rate": 1.953220338983051e-06, "loss": 1.0763, "step": 119 },
    { "epoch": 0.8, "learning_rate": 1.952542372881356e-06, "loss": 1.0847, "step": 120 },
    { "epoch": 0.81, "learning_rate": 1.951864406779661e-06, "loss": 1.0884, "step": 121 },
    { "epoch": 0.81, "learning_rate": 1.951186440677966e-06, "loss": 1.0877, "step": 122 },
    { "epoch": 0.82, "learning_rate": 1.950508474576271e-06, "loss": 1.074, "step": 123 },
    { "epoch": 0.83, "learning_rate": 1.949830508474576e-06, "loss": 1.0542, "step": 124 },
    { "epoch": 0.83, "learning_rate": 1.949152542372881e-06, "loss": 1.0796, "step": 125 },
    { "epoch": 0.84, "learning_rate": 1.9484745762711862e-06, "loss": 1.0593, "step": 126 },
    { "epoch": 0.85, "learning_rate": 1.9477966101694913e-06, "loss": 1.0454, "step": 127 },
    { "epoch": 0.85, "learning_rate": 1.9471186440677963e-06, "loss": 1.117, "step": 128 },
    { "epoch": 0.86, "learning_rate": 1.9464406779661014e-06, "loss": 1.0852, "step": 129 },
    { "epoch": 0.87, "learning_rate": 1.9457627118644065e-06, "loss": 1.059, "step": 130 },
    { "epoch": 0.87, "learning_rate": 1.945084745762712e-06, "loss": 1.0474, "step": 131 },
    { "epoch": 0.88, "learning_rate": 1.944406779661017e-06, "loss": 1.0996, "step": 132 },
    { "epoch": 0.89, "learning_rate": 1.943728813559322e-06, "loss": 1.0735, "step": 133 },
    { "epoch": 0.89, "learning_rate": 1.943050847457627e-06, "loss": 1.0683, "step": 134 },
    { "epoch": 0.9, "learning_rate": 1.942372881355932e-06, "loss": 1.063, "step": 135 },
    { "epoch": 0.91, "learning_rate": 1.941694915254237e-06, "loss": 1.063, "step": 136 },
    { "epoch": 0.91, "learning_rate": 1.9410169491525423e-06, "loss": 1.0754, "step": 137 },
    { "epoch": 0.92, "learning_rate": 1.9403389830508473e-06, "loss": 1.0264, "step": 138 },
    { "epoch": 0.93, "learning_rate": 1.9396610169491524e-06, "loss": 1.045, "step": 139 },
    { "epoch": 0.93, "learning_rate": 1.9389830508474574e-06, "loss": 1.0713, "step": 140 },
    { "epoch": 0.94, "learning_rate": 1.938305084745763e-06, "loss": 1.0788, "step": 141 },
    { "epoch": 0.95, "learning_rate": 1.937627118644068e-06, "loss": 1.0502, "step": 142 },
    { "epoch": 0.95, "learning_rate": 1.936949152542373e-06, "loss": 1.0731, "step": 143 },
    { "epoch": 0.96, "learning_rate": 1.9362711864406777e-06, "loss": 1.1199, "step": 144 },
    { "epoch": 0.97, "learning_rate": 1.9355932203389827e-06, "loss": 1.0752, "step": 145 },
    { "epoch": 0.97, "learning_rate": 1.9349152542372878e-06, "loss": 1.0377, "step": 146 },
    { "epoch": 0.98, "learning_rate": 1.9342372881355932e-06, "loss": 1.0764, "step": 147 },
    { "epoch": 0.99, "learning_rate": 1.9335593220338983e-06, "loss": 1.0315, "step": 148 },
    { "epoch": 0.99, "learning_rate": 1.9328813559322034e-06, "loss": 1.076, "step": 149 },
    { "epoch": 1.0, "learning_rate": 1.9322033898305084e-06, "loss": 1.0424, "step": 150 },
    { "epoch": 1.0, "eval_accuracy": 0.485, "eval_loss": 1.060792088508606, "eval_runtime": 2.1186, "eval_samples_per_second": 566.416, "eval_steps_per_second": 4.72, "step": 150 },
    { "epoch": 1.01, "learning_rate": 1.9315254237288135e-06, "loss": 1.1058, "step": 151 },
    { "epoch": 1.01, "learning_rate": 1.9308474576271185e-06, "loss": 1.0661, "step": 152 },
    { "epoch": 1.02, "learning_rate": 1.9301694915254236e-06, "loss": 1.0798, "step": 153 },
    { "epoch": 1.03, "learning_rate": 1.9294915254237286e-06, "loss": 1.0944, "step": 154 },
    { "epoch": 1.03, "learning_rate": 1.9288135593220337e-06, "loss": 1.0704, "step": 155 },
    { "epoch": 1.04, "learning_rate": 1.9281355932203387e-06, "loss": 1.0725, "step": 156 },
    { "epoch": 1.05, "learning_rate": 1.9274576271186442e-06, "loss": 0.9912, "step": 157 },
    { "epoch": 1.05, "learning_rate": 1.9267796610169493e-06, "loss": 1.032, "step": 158 },
    { "epoch": 1.06, "learning_rate": 1.9261016949152543e-06, "loss": 1.0375, "step": 159 },
    { "epoch": 1.07, "learning_rate": 1.9254237288135594e-06, "loss": 1.072, "step": 160 },
    { "epoch": 1.07, "learning_rate": 1.9247457627118644e-06, "loss": 1.0051, "step": 161 },
    { "epoch": 1.08, "learning_rate": 1.9240677966101695e-06, "loss": 1.0478, "step": 162 },
    { "epoch": 1.09, "learning_rate": 1.9233898305084746e-06, "loss": 1.0166, "step": 163 },
    { "epoch": 1.09, "learning_rate": 1.9227118644067796e-06, "loss": 1.077, "step": 164 },
    { "epoch": 1.1, "learning_rate": 1.9220338983050847e-06, "loss": 1.0317, "step": 165 },
    { "epoch": 1.11, "learning_rate": 1.9213559322033897e-06, "loss": 1.0481, "step": 166 },
    { "epoch": 1.11, "learning_rate": 1.9206779661016948e-06, "loss": 1.0667, "step": 167 },
    { "epoch": 1.12, "learning_rate": 1.92e-06, "loss": 1.0739, "step": 168 },
    { "epoch": 1.13, "learning_rate": 1.919322033898305e-06, "loss": 1.05, "step": 169 },
    { "epoch": 1.13, "learning_rate": 1.91864406779661e-06, "loss": 1.0537, "step": 170 },
    { "epoch": 1.14, "learning_rate": 1.917966101694915e-06, "loss": 1.0969, "step": 171 },
    { "epoch": 1.15, "learning_rate": 1.91728813559322e-06, "loss": 1.0318, "step": 172 },
    { "epoch": 1.15, "learning_rate": 1.9166101694915255e-06, "loss": 1.056, "step": 173 },
    { "epoch": 1.16, "learning_rate": 1.9159322033898306e-06, "loss": 1.0636, "step": 174 },
    { "epoch": 1.17, "learning_rate": 1.9152542372881356e-06, "loss": 1.0538, "step": 175 },
    { "epoch": 1.17, "learning_rate": 1.9145762711864407e-06, "loss": 1.0352, "step": 176 },
    { "epoch": 1.18, "learning_rate": 1.9138983050847458e-06, "loss": 1.0684, "step": 177 },
    { "epoch": 1.19, "learning_rate": 1.913220338983051e-06, "loss": 1.0111, "step": 178 },
    { "epoch": 1.19, "learning_rate": 1.912542372881356e-06, "loss": 1.0422, "step": 179 },
    { "epoch": 1.2, "learning_rate": 1.911864406779661e-06, "loss": 0.9925, "step": 180 },
    { "epoch": 1.21, "learning_rate": 1.911186440677966e-06, "loss": 0.9835, "step": 181 },
    { "epoch": 1.21, "learning_rate": 1.910508474576271e-06, "loss": 1.0539, "step": 182 },
    { "epoch": 1.22, "learning_rate": 1.909830508474576e-06, "loss": 1.0679, "step": 183 },
    { "epoch": 1.23, "learning_rate": 1.9091525423728816e-06, "loss": 1.0667, "step": 184 },
    { "epoch": 1.23, "learning_rate": 1.9084745762711866e-06, "loss": 1.089, "step": 185 },
    { "epoch": 1.24, "learning_rate": 1.9077966101694912e-06, "loss": 1.0636, "step": 186 },
    { "epoch": 1.25, "learning_rate": 1.9071186440677965e-06, "loss": 1.0225, "step": 187 },
    { "epoch": 1.25, "learning_rate": 1.9064406779661016e-06, "loss": 1.1044, "step": 188 },
    { "epoch": 1.26, "learning_rate": 1.9057627118644066e-06, "loss": 1.0323, "step": 189 },
    { "epoch": 1.27, "learning_rate": 1.9050847457627117e-06, "loss": 1.066, "step": 190 },
    { "epoch": 1.27, "learning_rate": 1.9044067796610167e-06, "loss": 1.072, "step": 191 },
    { "epoch": 1.28, "learning_rate": 1.903728813559322e-06, "loss": 0.9809, "step": 192 },
    { "epoch": 1.29, "learning_rate": 1.903050847457627e-06, "loss": 1.0569, "step": 193 },
    { "epoch": 1.29, "learning_rate": 1.9023728813559321e-06, "loss": 1.036, "step": 194 },
    { "epoch": 1.3, "learning_rate": 1.9016949152542372e-06, "loss": 1.0333, "step": 195 },
    { "epoch": 1.31, "learning_rate": 1.9010169491525422e-06, "loss": 1.071, "step": 196 },
    { "epoch": 1.31, "learning_rate": 1.9003389830508475e-06, "loss": 1.0687, "step": 197 },
    { "epoch": 1.32, "learning_rate": 1.8996610169491525e-06, "loss": 1.0969, "step": 198 },
    { "epoch": 1.33, "learning_rate": 1.8989830508474576e-06, "loss": 1.0175, "step": 199 },
    { "epoch": 1.33, "learning_rate": 1.8983050847457627e-06, "loss": 1.0223, "step": 200 },
    { "epoch": 1.34, "learning_rate": 1.8976271186440677e-06, "loss": 0.9534, "step": 201 },
    { "epoch": 1.35, "learning_rate": 1.8969491525423728e-06, "loss": 1.0213, "step": 202 },
    { "epoch": 1.35, "learning_rate": 1.896271186440678e-06, "loss": 1.013, "step": 203 },
    { "epoch": 1.36, "learning_rate": 1.895593220338983e-06, "loss": 1.0241, "step": 204 },
    { "epoch": 1.37, "learning_rate": 1.8949152542372881e-06, "loss": 1.0355, "step": 205 },
    { "epoch": 1.37, "learning_rate": 1.894237288135593e-06, "loss": 1.0511, "step": 206 },
    { "epoch": 1.38, "learning_rate": 1.893559322033898e-06, "loss": 1.0394, "step": 207 },
    { "epoch": 1.39, "learning_rate": 1.8928813559322033e-06, "loss": 0.953, "step": 208 },
    { "epoch": 1.39, "learning_rate": 1.8922033898305084e-06, "loss": 1.002, "step": 209 },
    { "epoch": 1.4, "learning_rate": 1.8915254237288134e-06, "loss": 1.0233, "step": 210 },
    { "epoch": 1.41, "learning_rate": 1.8908474576271185e-06, "loss": 1.0208, "step": 211 },
    { "epoch": 1.41, "learning_rate": 1.8901694915254235e-06, "loss": 1.0506, "step": 212 },
    { "epoch": 1.42, "learning_rate": 1.8894915254237286e-06, "loss": 1.046, "step": 213 },
    { "epoch": 1.43, "learning_rate": 1.8888135593220339e-06, "loss": 1.0002, "step": 214 },
    { "epoch": 1.43, "learning_rate": 1.888135593220339e-06, "loss": 0.9716, "step": 215 },
    { "epoch": 1.44, "learning_rate": 1.887457627118644e-06, "loss": 1.0353, "step": 216 },
    { "epoch": 1.45, "learning_rate": 1.886779661016949e-06, "loss": 0.9895, "step": 217 },
    { "epoch": 1.45, "learning_rate": 1.886101694915254e-06, "loss": 1.0655, "step": 218 },
    { "epoch": 1.46, "learning_rate": 1.8854237288135593e-06, "loss": 0.9634, "step": 219 },
    { "epoch": 1.47, "learning_rate": 1.8847457627118644e-06, "loss": 0.9876, "step": 220 },
    { "epoch": 1.47, "learning_rate": 1.8840677966101695e-06, "loss": 0.9716, "step": 221 },
    { "epoch": 1.48, "learning_rate": 1.8833898305084745e-06, "loss": 1.0419, "step": 222 },
    { "epoch": 1.49, "learning_rate": 1.8827118644067796e-06, "loss": 1.0206, "step": 223 },
    { "epoch": 1.49, "learning_rate": 1.8820338983050848e-06, "loss": 1.0537, "step": 224 },
    { "epoch": 1.5, "learning_rate": 1.8813559322033899e-06, "loss": 1.0073, "step": 225 },
    { "epoch": 1.51, "learning_rate": 1.880677966101695e-06, "loss": 1.0212, "step": 226 },
    { "epoch": 1.51, "learning_rate": 1.8799999999999998e-06, "loss": 0.998, "step": 227 },
    { "epoch": 1.52, "learning_rate": 1.8793220338983048e-06, "loss": 0.9832, "step": 228 },
    { "epoch": 1.53, "learning_rate": 1.87864406779661e-06, "loss": 0.9789, "step": 229 },
    { "epoch": 1.53, "learning_rate": 1.8779661016949152e-06, "loss": 1.0204, "step": 230 },
    { "epoch": 1.54, "learning_rate": 1.8772881355932202e-06, "loss": 1.0199, "step": 231 },
    { "epoch": 1.55, "learning_rate": 1.8766101694915253e-06, "loss": 1.0431, "step": 232 },
    { "epoch": 1.55, "learning_rate": 1.8759322033898303e-06, "loss": 0.9954, "step": 233 },
    { "epoch": 1.56, "learning_rate": 1.8752542372881354e-06, "loss": 0.979, "step": 234 },
    { "epoch": 1.57, "learning_rate": 1.8745762711864407e-06, "loss": 1.0617, "step": 235 },
    { "epoch": 1.57, "learning_rate": 1.8738983050847457e-06, "loss": 0.9424, "step": 236 },
    { "epoch": 1.58, "learning_rate": 1.8732203389830508e-06, "loss": 0.9906, "step": 237 },
    { "epoch": 1.59, "learning_rate": 1.8725423728813558e-06, "loss": 0.9598, "step": 238 },
    { "epoch": 1.59, "learning_rate": 1.8718644067796609e-06, "loss": 0.9742, "step": 239 },
    { "epoch": 1.6, "learning_rate": 1.8711864406779661e-06, "loss": 0.9749, "step": 240 },
    { "epoch": 1.61, "learning_rate": 1.8705084745762712e-06, "loss": 0.9873, "step": 241 },
    { "epoch": 1.61, "learning_rate": 1.8698305084745763e-06, "loss": 1.023, "step": 242 },
    { "epoch": 1.62, "learning_rate": 1.8691525423728813e-06, "loss": 0.9876, "step": 243 },
    { "epoch": 1.63, "learning_rate": 1.8684745762711864e-06, "loss": 0.993, "step": 244 },
    { "epoch": 1.63, "learning_rate": 1.8677966101694916e-06, "loss": 0.933, "step": 245 },
    { "epoch": 1.64, "learning_rate": 1.8671186440677967e-06, "loss": 0.997, "step": 246 },
    { "epoch": 1.65, "learning_rate": 1.8664406779661017e-06, "loss": 0.9932, "step": 247 },
    { "epoch": 1.65, "learning_rate": 1.8657627118644066e-06, "loss": 0.975, "step": 248 },
    { "epoch": 1.66, "learning_rate": 1.8650847457627116e-06, "loss": 0.9366, "step": 249 },
    { "epoch": 1.67, "learning_rate": 1.8644067796610167e-06, "loss": 0.9404, "step": 250 },
    { "epoch": 1.67, "learning_rate": 1.863728813559322e-06, "loss": 0.9429, "step": 251 },
    { "epoch": 1.68, "learning_rate": 1.863050847457627e-06, "loss": 0.9718, "step": 252 },
    { "epoch": 1.69, "learning_rate": 1.862372881355932e-06, "loss": 0.9689, "step": 253 },
    { "epoch": 1.69, "learning_rate": 1.8616949152542371e-06, "loss": 0.9405, "step": 254 },
    { "epoch": 1.7, "learning_rate": 1.8610169491525422e-06, "loss": 0.9855, "step": 255 },
    { "epoch": 1.71, "learning_rate": 1.8603389830508475e-06, "loss": 0.9498, "step": 256 },
    { "epoch": 1.71, "learning_rate": 1.8596610169491525e-06, "loss": 0.9855, "step": 257 },
    { "epoch": 1.72, "learning_rate": 1.8589830508474576e-06, "loss": 0.9612, "step": 258 },
    { "epoch": 1.73, "learning_rate": 1.8583050847457626e-06, "loss": 0.9946, "step": 259 },
    { "epoch": 1.73, "learning_rate": 1.8576271186440677e-06, "loss": 0.9795, "step": 260 },
    { "epoch": 1.74, "learning_rate": 1.8569491525423727e-06, "loss": 0.9826, "step": 261 },
    { "epoch": 1.75, "learning_rate": 1.856271186440678e-06, "loss": 0.9288, "step": 262 },
    { "epoch": 1.75, "learning_rate": 1.855593220338983e-06, "loss": 0.9907, "step": 263 },
    { "epoch": 1.76, "learning_rate": 1.8549152542372881e-06, "loss": 0.9485, "step": 264 },
    { "epoch": 1.77, "learning_rate": 1.8542372881355932e-06, "loss": 0.9506, "step": 265 },
    { "epoch": 1.77, "learning_rate": 1.8535593220338982e-06, "loss": 1.0025, "step": 266 },
    { "epoch": 1.78, "learning_rate": 1.8528813559322035e-06, "loss": 0.904, "step": 267 },
    { "epoch": 1.79, "learning_rate": 1.8522033898305085e-06, "loss": 0.9143, "step": 268 },
    { "epoch": 1.79, "learning_rate": 1.8515254237288134e-06, "loss": 0.9907, "step": 269 },
    { "epoch": 1.8, "learning_rate": 1.8508474576271184e-06, "loss": 0.9071, "step": 270 },
    { "epoch": 1.81, "learning_rate": 1.8501694915254235e-06, "loss": 0.8728, "step": 271 },
    { "epoch": 1.81, "learning_rate": 1.8494915254237288e-06, "loss": 0.886, "step": 272 },
    { "epoch": 1.82, "learning_rate": 1.8488135593220338e-06, "loss": 0.9176, "step": 273 },
    { "epoch": 1.83, "learning_rate": 1.8481355932203389e-06, "loss": 0.9395, "step": 274 },
    { "epoch": 1.83, "learning_rate": 1.847457627118644e-06, "loss": 1.0047, "step": 275 },
    { "epoch": 1.84, "learning_rate": 1.846779661016949e-06, "loss": 0.9278, "step": 276 },
    { "epoch": 1.85, "learning_rate": 1.846101694915254e-06, "loss": 0.8953, "step": 277 },
    { "epoch": 1.85, "learning_rate": 1.8454237288135593e-06, "loss": 0.8891, "step": 278 },
    { "epoch": 1.86, "learning_rate": 1.8447457627118644e-06, "loss": 0.9794, "step": 279 },
    { "epoch": 1.87, "learning_rate": 1.8440677966101694e-06, "loss": 0.9192, "step": 280 },
    { "epoch": 1.87, "learning_rate": 1.8433898305084745e-06, "loss": 0.8931, "step": 281 },
    { "epoch": 1.88, "learning_rate": 1.8427118644067795e-06, "loss": 0.9, "step": 282 },
    { "epoch": 1.89, "learning_rate": 1.8420338983050848e-06, "loss": 0.928, "step": 283 },
    { "epoch": 1.89, "learning_rate": 1.8413559322033898e-06, "loss": 0.9631, "step": 284 },
    { "epoch": 1.9, "learning_rate": 1.840677966101695e-06, "loss": 0.9365, "step": 285 },
    { "epoch": 1.91, "learning_rate": 1.84e-06, "loss": 0.8809, "step": 286 },
    { "epoch": 1.91, "learning_rate": 1.839322033898305e-06, "loss": 0.9445, "step": 287 },
    { "epoch": 1.92, "learning_rate": 1.8386440677966103e-06, "loss": 0.8565, "step": 288 },
    { "epoch": 1.93, "learning_rate": 1.8379661016949151e-06, "loss": 0.9079, "step": 289 },
    { "epoch": 1.93, "learning_rate": 1.8372881355932202e-06, "loss": 0.8988, "step": 290 },
    { "epoch": 1.94, "learning_rate": 1.8366101694915252e-06, "loss": 0.9305, "step": 291 },
    { "epoch": 1.95, "learning_rate": 1.8359322033898303e-06, "loss": 0.8694, "step": 292 },
    { "epoch": 1.95, "learning_rate": 1.8352542372881353e-06, "loss": 0.91, "step": 293 },
    { "epoch": 1.96, "learning_rate": 1.8345762711864406e-06, "loss": 0.9222, "step": 294 },
    { "epoch": 1.97, "learning_rate": 1.8338983050847457e-06, "loss": 0.8507, "step": 295 },
    { "epoch": 1.97, "learning_rate": 1.8332203389830507e-06, "loss": 0.8134, "step": 296 },
    { "epoch": 1.98, "learning_rate": 1.8325423728813558e-06, "loss": 0.8757, "step": 297 },
    { "epoch": 1.99, "learning_rate": 1.8318644067796608e-06, "loss": 0.9181, "step": 298 },
    { "epoch": 1.99, "learning_rate": 1.831186440677966e-06, "loss": 0.9024, "step": 299 },
    { "epoch": 2.0, "learning_rate": 1.8305084745762712e-06, "loss": 0.9063, "step": 300 },
    { "epoch": 2.0, "eval_accuracy": 0.7133333333333334, "eval_loss": 0.8667357563972473, "eval_runtime": 2.1333, "eval_samples_per_second": 562.519, "eval_steps_per_second": 4.688, "step": 300 },
    { "epoch": 2.01, "learning_rate": 1.8298305084745762e-06, "loss": 0.8751, "step": 301 },
    { "epoch": 2.01, "learning_rate": 1.8291525423728813e-06, "loss": 0.8468, "step": 302 },
    { "epoch": 2.02, "learning_rate": 1.8284745762711863e-06, "loss": 0.8219, "step": 303 },
    { "epoch": 2.03, "learning_rate": 1.8277966101694916e-06, "loss": 0.9336, "step": 304 },
    { "epoch": 2.03, "learning_rate": 1.8271186440677966e-06, "loss": 0.7976, "step": 305 },
    { "epoch": 2.04, "learning_rate": 1.8264406779661017e-06, "loss": 0.8596, "step": 306 },
    { "epoch": 2.05, "learning_rate": 1.8257627118644068e-06, "loss": 0.9149, "step": 307 },
    { "epoch": 2.05, "learning_rate": 1.8250847457627118e-06, "loss": 0.8966, "step": 308 },
    { "epoch": 2.06, "learning_rate": 1.8244067796610169e-06, "loss": 0.8069, "step": 309 },
    { "epoch": 2.07, "learning_rate": 1.823728813559322e-06, "loss": 0.7944, "step": 310 },
    { "epoch": 2.07, "learning_rate": 1.823050847457627e-06, "loss": 0.8648, "step": 311 },
    { "epoch": 2.08, "learning_rate": 1.822372881355932e-06, "loss": 0.8707, "step": 312 },
    { "epoch": 2.09, "learning_rate": 1.821694915254237e-06, "loss": 0.9647, "step": 313 },
    { "epoch": 2.09, "learning_rate": 1.8210169491525421e-06, "loss": 0.8766, "step": 314 },
    { "epoch": 2.1, "learning_rate": 1.8203389830508474e-06, "loss": 0.8581, "step": 315 },
    { "epoch": 2.11, "learning_rate": 1.8196610169491525e-06, "loss": 0.8455, "step": 316 },
    { "epoch": 2.11, "learning_rate": 1.8189830508474575e-06, "loss": 0.8239, "step": 317 },
    { "epoch": 2.12, "learning_rate": 1.8183050847457626e-06, "loss": 0.8324, "step": 318 },
    { "epoch": 2.13, "learning_rate": 1.8176271186440676e-06, "loss": 0.8952, "step": 319 },
    { "epoch": 2.13, "learning_rate": 1.816949152542373e-06, "loss": 0.784, "step": 320 },
    { "epoch": 2.14, "learning_rate": 1.816271186440678e-06, "loss": 0.7909, "step": 321 },
    { "epoch": 2.15, "learning_rate": 1.815593220338983e-06, "loss": 0.8393, "step": 322 },
    { "epoch": 2.15, "learning_rate": 1.814915254237288e-06, "loss": 0.7742, "step": 323 },
    { "epoch": 2.16, "learning_rate": 1.8142372881355931e-06, "loss": 0.8125, "step": 324 },
    { "epoch": 2.17, "learning_rate": 1.8135593220338982e-06, "loss": 0.8642, "step": 325 },
    { "epoch": 2.17, "learning_rate": 1.8128813559322034e-06, "loss": 0.8029, "step": 326 },
    { "epoch": 2.18, "learning_rate": 1.8122033898305085e-06, "loss": 0.7896, "step": 327 },
    { "epoch": 2.19, "learning_rate": 1.8115254237288136e-06, "loss": 0.7172, "step": 328 },
    { "epoch": 2.19, "learning_rate": 1.8108474576271186e-06, "loss": 0.7582, "step": 329 },
    { "epoch": 2.2, "learning_rate": 1.8101694915254237e-06, "loss": 0.925, "step": 330 },
    { "epoch": 2.21, "learning_rate": 1.8094915254237287e-06, "loss": 0.7542, "step": 331 },
    { "epoch": 2.21, "learning_rate": 1.8088135593220338e-06, "loss": 0.7409, "step": 332 },
    { "epoch": 2.22, "learning_rate": 1.8081355932203388e-06, "loss": 0.8217, "step": 333 },
    { "epoch": 2.23, "learning_rate": 1.8074576271186439e-06, "loss": 0.7942, "step": 334 },
    { "epoch": 2.23, "learning_rate": 1.806779661016949e-06, "loss": 0.798, "step": 335 },
    { "epoch": 2.24, "learning_rate": 1.8061016949152542e-06, "loss": 0.7975, "step": 336 },
    { "epoch": 2.25, "learning_rate": 1.8054237288135593e-06, "loss": 0.7211, "step": 337 },
    { "epoch": 2.25, "learning_rate": 1.8047457627118643e-06, "loss": 0.8354, "step": 338 },
    { "epoch": 2.26, "learning_rate": 1.8040677966101694e-06, "loss": 0.8051, "step": 339 },
    { "epoch": 2.27, "learning_rate": 1.8033898305084744e-06, "loss": 0.8015, "step": 340 },
    { "epoch": 2.27, "learning_rate": 1.8027118644067795e-06, "loss": 0.8058, "step": 341 },
    { "epoch": 2.28, "learning_rate": 1.8020338983050848e-06, "loss": 0.8007, "step": 342 },
    { "epoch": 2.29, "learning_rate": 1.8013559322033898e-06, "loss": 0.8089, "step": 343 },
    { "epoch": 2.29, "learning_rate": 1.8006779661016949e-06, "loss": 0.7677, "step": 344 },
    { "epoch": 2.3, "learning_rate": 1.8e-06, "loss": 0.7971, "step": 345 },
    { "epoch": 2.31, "learning_rate": 1.799322033898305e-06, "loss": 0.7407, "step": 346 },
    { "epoch": 2.31, "learning_rate": 1.7986440677966102e-06, "loss": 0.7246, "step": 347 },
    { "epoch": 2.32, "learning_rate": 1.7979661016949153e-06, "loss": 0.7908, "step": 348 },
    { "epoch": 2.33, "learning_rate": 1.7972881355932204e-06, "loss": 0.7823, "step": 349 },
    { "epoch": 2.33, "learning_rate": 1.7966101694915254e-06, "loss": 0.8502, "step": 350 },
    { "epoch": 2.34, "learning_rate": 1.7959322033898305e-06, "loss": 0.7582, "step": 351 },
    { "epoch": 2.35, "learning_rate": 1.7952542372881353e-06, "loss": 0.7133, "step": 352 },
    { "epoch": 2.35, "learning_rate": 1.7945762711864406e-06, "loss": 0.7181, "step": 353 },
    { "epoch": 2.36, "learning_rate": 1.7938983050847456e-06, "loss": 0.7364, "step": 354 },
    { "epoch": 2.37, "learning_rate": 1.7932203389830507e-06, "loss": 0.812, "step": 355 },
    { "epoch": 2.37, "learning_rate": 1.7925423728813557e-06, "loss": 0.7124, "step": 356 },
    { "epoch": 2.38, "learning_rate": 1.7918644067796608e-06, "loss": 0.7259, "step": 357 },
    { "epoch": 2.39, "learning_rate": 1.791186440677966e-06, "loss": 0.7452, "step": 358 },
    { "epoch": 2.39, "learning_rate": 1.7905084745762711e-06, "loss": 0.8349, "step": 359 },
    { "epoch": 2.4, "learning_rate": 1.7898305084745762e-06, "loss": 0.633, "step": 360 },
    { "epoch": 2.41, "learning_rate": 1.7891525423728812e-06, "loss": 0.6854, "step": 361 },
    { "epoch": 2.41, "learning_rate": 1.7884745762711863e-06, "loss": 0.7067, "step": 362 },
    { "epoch": 2.42, "learning_rate": 1.7877966101694916e-06, "loss": 0.6715, "step": 363 },
    { "epoch": 2.43, "learning_rate": 1.7871186440677966e-06, "loss": 0.7378, "step": 364 },
    { "epoch": 2.43, "learning_rate": 1.7864406779661017e-06, "loss": 0.6822, "step": 365 },
    { "epoch": 2.44, "learning_rate": 1.7857627118644067e-06, "loss": 0.6902, "step": 366 },
    { "epoch": 2.45, "learning_rate": 1.7850847457627118e-06, "loss": 0.6769, "step": 367 },
    { "epoch": 2.45, "learning_rate": 1.784406779661017e-06, "loss": 0.6375, "step": 368 },
    { "epoch": 2.46, "learning_rate": 1.783728813559322e-06, "loss": 0.6882, "step": 369 },
    { "epoch": 2.47, "learning_rate": 1.7830508474576271e-06, "loss": 0.6189, "step": 370 },
    { "epoch": 2.47, "learning_rate": 1.7823728813559322e-06, "loss": 0.7674, "step": 371 },
    { "epoch": 2.48, "learning_rate": 1.7816949152542373e-06, "loss": 0.693, "step": 372 },
    { "epoch": 2.49, "learning_rate": 1.781016949152542e-06, "loss": 0.6153, "step": 373 },
    { "epoch": 2.49, "learning_rate": 1.7803389830508474e-06, "loss": 0.7488, "step": 374 },
    { "epoch": 2.5, "learning_rate": 1.7796610169491524e-06, "loss": 0.6897, "step": 375 },
    { "epoch": 2.51, "learning_rate": 1.7789830508474575e-06, "loss": 0.7201, "step": 376 },
    { "epoch": 2.51, "learning_rate": 1.7783050847457625e-06, "loss": 0.6733, "step": 377 },
    { "epoch": 2.52, "learning_rate": 1.7776271186440676e-06, "loss": 0.696, "step": 378 },
    { "epoch": 2.53, "learning_rate": 1.7769491525423729e-06, "loss": 0.5612, "step": 379 },
    { "epoch": 2.53, "learning_rate": 1.776271186440678e-06, "loss": 0.6796, "step": 380 },
    { "epoch": 2.54, "learning_rate": 1.775593220338983e-06, "loss": 0.7384, "step": 381 },
    { "epoch": 2.55, "learning_rate": 1.774915254237288e-06, "loss": 0.6982, "step": 382 },
    { "epoch": 2.55, "learning_rate": 1.774237288135593e-06, "loss": 0.7704, "step": 383 },
    { "epoch": 2.56, "learning_rate": 1.7735593220338983e-06, "loss": 0.6095, "step": 384 },
    { "epoch": 2.57, "learning_rate": 1.7728813559322034e-06, "loss": 0.6375, "step": 385 },
    { "epoch": 2.57, "learning_rate": 1.7722033898305085e-06, "loss": 0.5513, "step": 386 },
    { "epoch": 2.58, "learning_rate": 1.7715254237288135e-06, "loss": 0.6198, "step": 387 },
    { "epoch": 2.59, "learning_rate": 1.7708474576271186e-06, "loss": 0.7227, "step": 388 },
    { "epoch": 2.59, "learning_rate": 1.7701694915254236e-06, "loss": 0.5921, "step": 389 },
    { "epoch": 2.6, "learning_rate": 1.7694915254237289e-06, "loss": 0.6074, "step": 390 },
    { "epoch": 2.61, "learning_rate": 1.768813559322034e-06, "loss": 0.5944, "step": 391 },
    { "epoch": 2.61, "learning_rate": 1.768135593220339e-06, "loss": 0.7306, "step": 392 },
    { "epoch": 2.62, "learning_rate": 1.7674576271186438e-06, "loss": 0.7679, "step": 393 },
    { "epoch": 2.63, "learning_rate": 1.766779661016949e-06, "loss": 0.6495, "step": 394 },
    { "epoch": 2.63, "learning_rate": 1.7661016949152542e-06, "loss": 0.6567, "step": 395 },
    { "epoch": 2.64, "learning_rate": 1.7654237288135592e-06, "loss": 0.6356, "step": 396 },
    { "epoch": 2.65, "learning_rate": 1.7647457627118643e-06, "loss": 0.6007, "step": 397 },
    { "epoch": 2.65, "learning_rate": 1.7640677966101693e-06, "loss": 0.5883, "step": 398 },
    { "epoch": 2.66, "learning_rate": 1.7633898305084744e-06, "loss": 0.6513, "step": 399 },
    { "epoch": 2.67, "learning_rate": 1.7627118644067794e-06, "loss": 0.6602, "step": 400 },
    { "epoch": 2.67, "learning_rate": 1.7620338983050847e-06, "loss": 0.5831, "step": 401 },
    { "epoch": 2.68, "learning_rate": 1.7613559322033898e-06, "loss": 0.6705, "step": 402 },
    { "epoch": 2.69, "learning_rate": 1.7606779661016948e-06, "loss": 0.7556, "step": 403 },
    { "epoch": 2.69, "learning_rate": 1.7599999999999999e-06, "loss": 0.6033, "step": 404 },
    { "epoch": 2.7, "learning_rate": 1.759322033898305e-06, "loss": 0.7712, "step": 405 },
    { "epoch": 2.71, "learning_rate": 1.7586440677966102e-06, "loss": 0.6089, "step": 406 },
    { "epoch": 2.71, "learning_rate": 1.7579661016949153e-06, "loss": 0.6834, "step": 407 },
    { "epoch": 2.72, "learning_rate": 1.7572881355932203e-06, "loss": 0.6176, "step": 408 },
    { "epoch": 2.73, "learning_rate": 1.7566101694915254e-06, "loss": 0.6615, "step": 409 },
    { "epoch": 2.73, "learning_rate": 1.7559322033898304e-06, "loss": 0.5539, "step": 410 },
    { "epoch": 2.74, "learning_rate": 1.7552542372881357e-06, "loss": 0.5066, "step": 411 },
    { "epoch": 2.75, "learning_rate": 1.7545762711864407e-06, "loss": 0.5326, "step": 412 },
    { "epoch": 2.75, "learning_rate": 1.7538983050847458e-06, "loss": 0.6448, "step": 413 },
    { "epoch": 2.76, "learning_rate": 1.7532203389830506e-06, "loss": 0.6307, "step": 414 },
    { "epoch": 2.77, "learning_rate": 1.7525423728813557e-06, "loss": 0.5408, "step": 415 },
    { "epoch": 2.77, "learning_rate": 1.7518644067796608e-06, "loss": 0.5494, "step": 416 },
    { "epoch": 2.78, "learning_rate": 1.751186440677966e-06, "loss": 0.5943, "step": 417 },
    { "epoch": 2.79, "learning_rate": 1.750508474576271e-06, "loss": 0.7255, "step": 418 },
    { "epoch": 2.79, "learning_rate": 1.7498305084745761e-06, "loss": 0.5502, "step": 419 },
    { "epoch": 2.8, "learning_rate": 1.7491525423728812e-06, "loss": 0.5747, "step": 420 },
    { "epoch": 2.81, "learning_rate": 1.7484745762711862e-06, "loss": 0.7147, "step": 421 },
    { "epoch": 2.81, "learning_rate": 1.7477966101694915e-06, "loss": 0.5059, "step": 422 },
    { "epoch": 2.82, "learning_rate": 1.7471186440677966e-06, "loss": 0.5937, "step": 423 },
    { "epoch": 2.83, "learning_rate": 1.7464406779661016e-06, "loss": 0.5577, "step": 424 },
    { "epoch": 2.83, "learning_rate": 1.7457627118644067e-06, "loss": 0.6441, "step": 425 },
    { "epoch": 2.84, "learning_rate": 1.7450847457627117e-06, "loss": 0.6555, "step": 426 },
    { "epoch": 2.85, "learning_rate": 1.744406779661017e-06, "loss": 0.5725, "step": 427 },
    { "epoch": 2.85, "learning_rate": 1.743728813559322e-06, "loss": 0.7748, "step": 428 },
    { "epoch": 2.86, "learning_rate": 1.7430508474576271e-06, "loss": 0.6581, "step": 429 },
    { "epoch": 2.87, "learning_rate": 1.7423728813559322e-06, "loss": 0.7125, "step": 430 },
    { "epoch": 2.87, "learning_rate": 1.7416949152542372e-06, "loss": 0.5245, "step": 431 },
    { "epoch": 2.88, "learning_rate": 1.7410169491525423e-06, "loss": 0.672, "step": 432 },
    { "epoch": 2.89, "learning_rate": 1.7403389830508475e-06, "loss": 0.5478, "step": 433 },
    { "epoch": 2.89, "learning_rate": 1.7396610169491526e-06, "loss": 0.5794, "step": 434 },
    { "epoch": 2.9, "learning_rate": 1.7389830508474574e-06, "loss": 0.5407, "step": 435 },
    { "epoch": 2.91, "learning_rate": 1.7383050847457625e-06, "loss": 0.7596, "step": 436 },
    { "epoch": 2.91, "learning_rate": 1.7376271186440676e-06, "loss": 0.5474, "step": 437 },
    { "epoch": 2.92, "learning_rate": 1.7369491525423728e-06, "loss": 0.5168, "step": 438 },
    { "epoch": 2.93, "learning_rate": 1.7362711864406779e-06, "loss": 0.5813, "step": 439 },
    { "epoch": 2.93, "learning_rate": 1.735593220338983e-06, "loss": 0.6007, "step": 440 },
    { "epoch": 2.94, "learning_rate": 1.734915254237288e-06, "loss": 0.6333, "step": 441 },
    { "epoch": 2.95, "learning_rate": 1.734237288135593e-06, "loss": 0.6517, "step": 442 },
    { "epoch": 2.95, "learning_rate": 1.7335593220338983e-06, "loss": 0.708, "step": 443 },
    { "epoch": 2.96, "learning_rate": 1.7328813559322034e-06, "loss": 0.5086, "step": 444 },
    { "epoch": 2.97, "learning_rate": 1.7322033898305084e-06, "loss": 0.6153, "step": 445 },
    { "epoch": 2.97, "learning_rate": 1.7315254237288135e-06, "loss": 0.5577, "step": 446 },
    { "epoch": 2.98, "learning_rate": 1.7308474576271185e-06, "loss": 0.5158, "step": 447 },
    { "epoch": 2.99, "learning_rate": 1.7301694915254236e-06, "loss": 0.588, "step": 448 },
    { "epoch": 2.99, "learning_rate": 1.7294915254237289e-06, "loss": 0.5367, "step": 449 },
    { "epoch": 3.0, "learning_rate": 1.728813559322034e-06, "loss": 0.5797, "step": 450 },
    { "epoch": 3.0, "eval_accuracy": 0.855, "eval_loss": 0.5155936479568481, "eval_runtime": 2.1495, "eval_samples_per_second": 558.264, "eval_steps_per_second": 4.652, "step": 450 },
    { "epoch": 3.01, "learning_rate": 1.728135593220339e-06, "loss": 0.613, "step": 451 },
    { "epoch": 3.01, "learning_rate": 1.727457627118644e-06, "loss": 0.5857, "step": 452 },
    { "epoch": 3.02, "learning_rate": 1.726779661016949e-06, "loss": 0.5536, "step": 453 },
    { "epoch": 3.03, "learning_rate": 1.7261016949152543e-06, "loss": 0.5302, "step": 454 },
    { "epoch": 3.03, "learning_rate": 1.7254237288135594e-06, "loss": 0.4908, "step": 455 },
    { "epoch": 3.04, "learning_rate": 1.7247457627118642e-06, "loss": 0.4933, "step": 456 },
    { "epoch": 3.05, "learning_rate": 1.7240677966101693e-06, "loss": 0.5044, "step": 457 },
    { "epoch": 3.05, "learning_rate": 1.7233898305084743e-06, "loss": 0.5629, "step": 458 },
    { "epoch": 3.06, "learning_rate": 1.7227118644067796e-06, "loss": 0.4751, "step": 459 },
    { "epoch": 3.07, "learning_rate": 1.7220338983050847e-06, "loss": 0.7558, "step": 460 },
    { "epoch": 3.07, "learning_rate": 1.7213559322033897e-06, "loss": 0.6268, "step": 461 },
    { "epoch": 3.08, "learning_rate": 1.7206779661016948e-06, "loss": 0.5394, "step": 462 },
    { "epoch": 3.09, "learning_rate": 1.7199999999999998e-06, "loss": 0.4676, "step": 463 },
    { "epoch": 3.09, "learning_rate": 1.7193220338983049e-06, "loss": 0.5127, "step": 464 },
    { "epoch": 3.1, "learning_rate": 1.7186440677966102e-06, "loss": 0.6788, "step": 465 },
    { "epoch": 3.11, "learning_rate": 1.7179661016949152e-06, "loss": 0.464, "step": 466 },
    { "epoch": 3.11, "learning_rate": 1.7172881355932203e-06, "loss": 0.5642, "step": 467 },
    { "epoch": 3.12, "learning_rate": 1.7166101694915253e-06, "loss": 0.4097, "step": 468 },
    { "epoch": 3.13, "learning_rate": 1.7159322033898304e-06, "loss": 0.7187, "step": 469 },
    { "epoch": 3.13, "learning_rate": 1.7152542372881356e-06, "loss": 0.5063, "step": 470 },
    { "epoch": 3.14, "learning_rate": 1.7145762711864407e-06, "loss": 0.4078, "step": 471 },
    { "epoch": 3.15, "learning_rate": 1.7138983050847458e-06, "loss": 0.6129, "step": 472 },
    { "epoch": 3.15, "learning_rate": 1.7132203389830508e-06, "loss": 0.5158, "step": 473 },
    { "epoch": 3.16, "learning_rate": 1.7125423728813559e-06, "loss": 0.6836, "step": 474 },
    { "epoch": 3.17, "learning_rate": 1.7118644067796611e-06, "loss": 0.4219, "step": 475 },
    { "epoch": 3.17, "learning_rate": 1.711186440677966e-06, "loss": 0.503, "step": 476 },
    { "epoch": 3.18, "learning_rate": 1.710508474576271e-06, "loss": 0.5149, "step": 477 },
    { "epoch": 3.19, "learning_rate": 1.709830508474576e-06, "loss": 0.5265, "step": 478 },
    { "epoch": 3.19, "learning_rate": 1.7091525423728811e-06, "loss": 0.3986, "step": 479 },
    { "epoch": 3.2, "learning_rate": 1.7084745762711862e-06, "loss": 0.6311, "step": 480 },
    { "epoch": 3.21, "learning_rate": 1.7077966101694915e-06, "loss": 0.4982, "step": 481 },
    { "epoch": 3.21, "learning_rate": 1.7071186440677965e-06, "loss": 0.4812, "step": 482 },
    { "epoch": 3.22, "learning_rate": 1.7064406779661016e-06, "loss": 0.4371, "step": 483 },
    { "epoch": 3.23, "learning_rate": 1.7057627118644066e-06, "loss": 0.2828, "step": 484 },
    { "epoch": 3.23, "learning_rate": 1.7050847457627117e-06, "loss": 0.4966, "step": 485 },
    { "epoch": 3.24, "learning_rate": 1.704406779661017e-06, "loss": 0.487, "step": 486 },
    { "epoch": 3.25, "learning_rate": 1.703728813559322e-06, "loss": 0.48, "step": 487 },
    { "epoch": 3.25, "learning_rate": 1.703050847457627e-06, "loss": 0.53, "step": 488 },
    { "epoch": 3.26, "learning_rate": 1.7023728813559321e-06, "loss": 0.4308, "step": 489 },
    { "epoch": 3.27, "learning_rate": 1.7016949152542372e-06, "loss": 0.4344, "step": 490 },
    { "epoch": 3.27, "learning_rate": 1.7010169491525424e-06, "loss": 0.4264, "step": 491 },
    { "epoch": 3.28, "learning_rate": 1.7003389830508475e-06, "loss": 0.3625, "step": 492 },
    { "epoch": 3.29, "learning_rate": 1.6996610169491526e-06, "loss": 0.469, "step": 493 },
    { "epoch": 3.29, "learning_rate": 1.6989830508474576e-06, "loss": 0.5119, "step": 494 },
    { "epoch": 3.3, "learning_rate": 1.6983050847457627e-06, "loss": 0.5313, "step": 495 },
    { "epoch": 3.31, "learning_rate": 1.6976271186440677e-06, "loss": 0.4774, "step": 496 },
    { "epoch": 3.31, "learning_rate": 1.6969491525423728e-06, "loss": 0.6185, "step": 497 },
    { "epoch": 3.32, "learning_rate": 1.6962711864406778e-06, "loss": 0.5242, "step": 498 },
    { "epoch": 3.33, "learning_rate": 1.6955932203389829e-06, "loss": 0.589, "step": 499 },
    { "epoch": 3.33, "learning_rate": 1.694915254237288e-06, "loss": 0.3651, "step": 500 },
    { "epoch": 3.34, "learning_rate": 1.694237288135593e-06, "loss": 0.5152, "step": 501 },
    { "epoch": 3.35, "learning_rate": 1.6935593220338983e-06, "loss": 0.3403, "step": 502 },
    { "epoch": 3.35, "learning_rate": 1.6928813559322033e-06, "loss": 0.4487, "step": 503 },
    { "epoch": 3.36, "learning_rate": 1.6922033898305084e-06, "loss": 0.5165, "step": 504 },
    { "epoch": 3.37, "learning_rate": 1.6915254237288134e-06, "loss": 0.3588, "step": 505 },
    { "epoch": 3.37, "learning_rate": 1.6908474576271185e-06, "loss": 0.36, "step": 506 },
    { "epoch": 3.38, "learning_rate": 1.6901694915254238e-06, "loss": 0.3639, "step": 507 },
    { "epoch": 3.39, "learning_rate": 1.6894915254237288e-06, "loss": 0.3645, "step": 508 },
    { "epoch": 3.39, "learning_rate": 1.6888135593220339e-06, "loss": 0.4184, "step": 509 },
    { "epoch": 3.4, "learning_rate": 1.688135593220339e-06, "loss": 0.3949, "step": 510 },
    { "epoch": 3.41, "learning_rate": 1.687457627118644e-06, "loss": 0.472, "step": 511 },
    { "epoch": 3.41, "learning_rate": 1.686779661016949e-06, "loss": 0.5901, "step": 512 },
    { "epoch": 3.42, "learning_rate": 1.6861016949152543e-06, "loss": 0.5605, "step": 513 },
    { "epoch": 3.43, "learning_rate": 1.6854237288135594e-06, "loss": 0.3559, "step": 514 },
    { "epoch": 3.43, "learning_rate": 1.6847457627118644e-06, "loss": 0.5166, "step": 515 },
    { "epoch": 3.44, "learning_rate": 1.6840677966101695e-06, "loss": 0.4741, "step": 516 },
    { "epoch": 3.45, "learning_rate": 1.6833898305084745e-06, "loss": 0.3698, "step": 517 },
    { "epoch": 3.45, "learning_rate": 1.6827118644067796e-06, "loss": 0.3327, "step": 518 },
    { "epoch": 3.46, "learning_rate": 1.6820338983050846e-06, "loss": 0.4358, "step": 519 },
    { "epoch": 3.47, "learning_rate": 1.6813559322033897e-06, "loss": 0.4175, "step": 520 },
    { "epoch": 3.47, "learning_rate": 1.6806779661016947e-06, "loss": 0.5203, "step": 521 },
    { "epoch": 3.48, "learning_rate": 1.6799999999999998e-06, "loss": 0.4134, "step": 522 },
    { "epoch": 3.49, "learning_rate": 1.6793220338983049e-06, "loss": 0.4262, "step": 523 },
    { "epoch": 3.49, "learning_rate": 1.6786440677966101e-06, "loss": 0.4193, "step": 524 },
    { "epoch": 3.5, "learning_rate": 1.6779661016949152e-06, "loss": 0.4083, "step": 525 },
    { "epoch": 3.51, "learning_rate": 1.6772881355932202e-06, "loss": 0.4328, "step": 526 },
    { "epoch": 3.51, "learning_rate": 1.6766101694915253e-06, "loss": 0.4179, "step": 527 },
    { "epoch": 3.52, "learning_rate": 1.6759322033898303e-06, "loss": 0.4417, "step": 528 },
    { "epoch": 3.53, "learning_rate": 1.6752542372881356e-06, "loss": 0.44, "step": 529 },
    { "epoch": 3.53, "learning_rate": 1.6745762711864407e-06, "loss": 0.389, "step": 530 },
    { "epoch": 3.54, "learning_rate": 1.6738983050847457e-06, "loss": 0.4417, "step": 531 },
    { "epoch": 3.55, "learning_rate": 1.6732203389830508e-06, "loss": 0.6049, "step": 532 },
    { "epoch": 3.55, "learning_rate": 1.6725423728813558e-06, "loss": 0.2886, "step": 533 },
    { "epoch": 3.56, "learning_rate": 1.671864406779661e-06, "loss": 0.362, "step": 534 },
    { "epoch": 3.57, "learning_rate": 1.6711864406779662e-06, "loss": 0.4893,
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 1.6705084745762712e-06, |
|
"loss": 0.4156, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 1.6698305084745763e-06, |
|
"loss": 0.4368, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 1.6691525423728813e-06, |
|
"loss": 0.2854, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 1.6684745762711862e-06, |
|
"loss": 0.3816, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 1.6677966101694914e-06, |
|
"loss": 0.3923, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.6671186440677965e-06, |
|
"loss": 0.3821, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 1.6664406779661015e-06, |
|
"loss": 0.3538, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.6657627118644066e-06, |
|
"loss": 0.4051, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.6650847457627116e-06, |
|
"loss": 0.2688, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.664406779661017e-06, |
|
"loss": 0.4024, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 1.663728813559322e-06, |
|
"loss": 0.4056, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.663050847457627e-06, |
|
"loss": 0.474, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.662372881355932e-06, |
|
"loss": 0.3274, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 1.6616949152542371e-06, |
|
"loss": 0.2781, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.6610169491525424e-06, |
|
"loss": 0.5106, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.6603389830508475e-06, |
|
"loss": 0.4192, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.6596610169491525e-06, |
|
"loss": 0.5165, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.6589830508474576e-06, |
|
"loss": 0.3866, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.6583050847457626e-06, |
|
"loss": 0.4865, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 1.6576271186440679e-06, |
|
"loss": 0.4561, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.656949152542373e-06, |
|
"loss": 0.3234, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.656271186440678e-06, |
|
"loss": 0.3329, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.655593220338983e-06, |
|
"loss": 0.2898, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.654915254237288e-06, |
|
"loss": 0.2935, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 1.654237288135593e-06, |
|
"loss": 0.5655, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.6535593220338982e-06, |
|
"loss": 0.2198, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.6528813559322033e-06, |
|
"loss": 0.3579, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 1.6522033898305083e-06, |
|
"loss": 0.4593, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.6515254237288134e-06, |
|
"loss": 0.4252, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.6508474576271184e-06, |
|
"loss": 0.3378, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.6501694915254237e-06, |
|
"loss": 0.3833, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 1.6494915254237288e-06, |
|
"loss": 0.3362, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.6488135593220338e-06, |
|
"loss": 0.4184, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.6481355932203389e-06, |
|
"loss": 0.4318, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.647457627118644e-06, |
|
"loss": 0.4544, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 1.646779661016949e-06, |
|
"loss": 0.2803, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 1.6461016949152543e-06, |
|
"loss": 0.4824, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 1.6454237288135593e-06, |
|
"loss": 0.2617, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.6447457627118644e-06, |
|
"loss": 0.4356, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.6440677966101694e-06, |
|
"loss": 0.3887, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 1.6433898305084745e-06, |
|
"loss": 0.3377, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 1.6427118644067797e-06, |
|
"loss": 0.3548, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 1.6420338983050848e-06, |
|
"loss": 0.3864, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 1.6413559322033899e-06, |
|
"loss": 0.3168, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.6406779661016947e-06, |
|
"loss": 0.2914, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.6399999999999998e-06, |
|
"loss": 0.3451, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 1.639322033898305e-06, |
|
"loss": 0.26, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 1.63864406779661e-06, |
|
"loss": 0.289, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 1.6379661016949151e-06, |
|
"loss": 0.3175, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 1.6372881355932202e-06, |
|
"loss": 0.4027, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 1.6366101694915252e-06, |
|
"loss": 0.4006, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 1.6359322033898303e-06, |
|
"loss": 0.4482, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.6352542372881356e-06, |
|
"loss": 0.3256, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 1.6345762711864406e-06, |
|
"loss": 0.2944, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 1.6338983050847457e-06, |
|
"loss": 0.2837, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.6332203389830507e-06, |
|
"loss": 0.2364, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.6325423728813558e-06, |
|
"loss": 0.3801, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 1.631864406779661e-06, |
|
"loss": 0.3241, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 1.6311864406779661e-06, |
|
"loss": 0.3508, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.6305084745762712e-06, |
|
"loss": 0.4068, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.6298305084745762e-06, |
|
"loss": 0.3879, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 1.6291525423728813e-06, |
|
"loss": 0.4832, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.6284745762711865e-06, |
|
"loss": 0.4062, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.6277966101694916e-06, |
|
"loss": 0.3831, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 1.6271186440677967e-06, |
|
"loss": 0.2905, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.9041666666666667, |
|
"eval_loss": 0.33033275604248047, |
|
"eval_runtime": 2.1567, |
|
"eval_samples_per_second": 556.397, |
|
"eval_steps_per_second": 4.637, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 1.6264406779661015e-06, |
|
"loss": 0.3392, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 1.6257627118644066e-06, |
|
"loss": 0.7027, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.6250847457627116e-06, |
|
"loss": 0.2786, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.6244067796610169e-06, |
|
"loss": 0.5368, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.623728813559322e-06, |
|
"loss": 0.4069, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 1.623050847457627e-06, |
|
"loss": 0.2945, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.622372881355932e-06, |
|
"loss": 0.2948, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.621694915254237e-06, |
|
"loss": 0.3306, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.6210169491525424e-06, |
|
"loss": 0.3564, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.6203389830508474e-06, |
|
"loss": 0.3317, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.6196610169491525e-06, |
|
"loss": 0.3692, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 1.6189830508474575e-06, |
|
"loss": 0.2354, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.6183050847457626e-06, |
|
"loss": 0.3354, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.6176271186440679e-06, |
|
"loss": 0.4009, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.616949152542373e-06, |
|
"loss": 0.4017, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.616271186440678e-06, |
|
"loss": 0.3982, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.615593220338983e-06, |
|
"loss": 0.3335, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.614915254237288e-06, |
|
"loss": 0.3801, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.6142372881355931e-06, |
|
"loss": 0.1589, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.6135593220338984e-06, |
|
"loss": 0.3561, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.6128813559322035e-06, |
|
"loss": 0.3253, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.6122033898305083e-06, |
|
"loss": 0.3365, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.6115254237288134e-06, |
|
"loss": 0.3027, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.6108474576271184e-06, |
|
"loss": 0.3476, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.6101694915254237e-06, |
|
"loss": 0.3315, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.6094915254237287e-06, |
|
"loss": 0.2791, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.6088135593220338e-06, |
|
"loss": 0.4971, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.6081355932203388e-06, |
|
"loss": 0.3441, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.607457627118644e-06, |
|
"loss": 0.2838, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.6067796610169492e-06, |
|
"loss": 0.4489, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.6061016949152542e-06, |
|
"loss": 0.3462, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.6054237288135593e-06, |
|
"loss": 0.3824, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.6047457627118643e-06, |
|
"loss": 0.3284, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.6040677966101694e-06, |
|
"loss": 0.3177, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.6033898305084744e-06, |
|
"loss": 0.2726, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.6027118644067797e-06, |
|
"loss": 0.285, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.6020338983050848e-06, |
|
"loss": 0.1887, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.6013559322033898e-06, |
|
"loss": 0.3477, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.6006779661016949e-06, |
|
"loss": 0.4365, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.6e-06, |
|
"loss": 0.2508, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.5993220338983052e-06, |
|
"loss": 0.3327, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.5986440677966103e-06, |
|
"loss": 0.3568, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.597966101694915e-06, |
|
"loss": 0.3114, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.5972881355932201e-06, |
|
"loss": 0.1868, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.5966101694915252e-06, |
|
"loss": 0.3603, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.5959322033898305e-06, |
|
"loss": 0.3288, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.5952542372881355e-06, |
|
"loss": 0.3535, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 1.5945762711864406e-06, |
|
"loss": 0.2105, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 1.5938983050847456e-06, |
|
"loss": 0.263, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 1.5932203389830507e-06, |
|
"loss": 0.3083, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 1.5925423728813557e-06, |
|
"loss": 0.3208, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 1.591864406779661e-06, |
|
"loss": 0.3202, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 1.591186440677966e-06, |
|
"loss": 0.1551, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 1.5905084745762711e-06, |
|
"loss": 0.3373, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 1.5898305084745762e-06, |
|
"loss": 0.1501, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 1.5891525423728812e-06, |
|
"loss": 0.2839, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 1.5884745762711865e-06, |
|
"loss": 0.223, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 1.5877966101694916e-06, |
|
"loss": 0.2818, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 1.5871186440677966e-06, |
|
"loss": 0.3686, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 1.5864406779661017e-06, |
|
"loss": 0.186, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 1.5857627118644067e-06, |
|
"loss": 0.1631, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 1.585084745762712e-06, |
|
"loss": 0.1846, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 1.5844067796610168e-06, |
|
"loss": 0.3073, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 1.5837288135593219e-06, |
|
"loss": 0.2635, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 1.583050847457627e-06, |
|
"loss": 0.2, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 1.582372881355932e-06, |
|
"loss": 0.2204, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 1.581694915254237e-06, |
|
"loss": 0.1922, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 1.5810169491525423e-06, |
|
"loss": 0.3237, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 1.5803389830508474e-06, |
|
"loss": 0.3518, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 1.5796610169491524e-06, |
|
"loss": 0.2762, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 1.5789830508474575e-06, |
|
"loss": 0.2683, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 1.5783050847457625e-06, |
|
"loss": 0.2838, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.5776271186440678e-06, |
|
"loss": 0.2398, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.5769491525423729e-06, |
|
"loss": 0.1706, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 1.576271186440678e-06, |
|
"loss": 0.3316, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 1.575593220338983e-06, |
|
"loss": 0.1924, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 1.574915254237288e-06, |
|
"loss": 0.4937, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 1.5742372881355933e-06, |
|
"loss": 0.1876, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 1.5735593220338984e-06, |
|
"loss": 0.3832, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 1.5728813559322034e-06, |
|
"loss": 0.1886, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 1.5722033898305085e-06, |
|
"loss": 0.2339, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 1.5715254237288135e-06, |
|
"loss": 0.322, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 1.5708474576271186e-06, |
|
"loss": 0.266, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 1.5701694915254236e-06, |
|
"loss": 0.3082, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.5694915254237287e-06, |
|
"loss": 0.2399, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.5688135593220337e-06, |
|
"loss": 0.3131, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 1.5681355932203388e-06, |
|
"loss": 0.3412, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 1.5674576271186439e-06, |
|
"loss": 0.2242, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 1.5667796610169491e-06, |
|
"loss": 0.1247, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.5661016949152542e-06, |
|
"loss": 0.2313, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.5654237288135592e-06, |
|
"loss": 0.1711, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.5647457627118643e-06, |
|
"loss": 0.2257, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 1.5640677966101693e-06, |
|
"loss": 0.1452, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 1.5633898305084746e-06, |
|
"loss": 0.2224, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 1.5627118644067797e-06, |
|
"loss": 0.2951, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 1.5620338983050847e-06, |
|
"loss": 0.2922, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.5613559322033898e-06, |
|
"loss": 0.1637, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.5606779661016948e-06, |
|
"loss": 0.2558, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 1.5599999999999999e-06, |
|
"loss": 0.3369, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.5593220338983052e-06, |
|
"loss": 0.1788, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.5586440677966102e-06, |
|
"loss": 0.2448, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.5579661016949153e-06, |
|
"loss": 0.2631, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.5572881355932203e-06, |
|
"loss": 0.2031, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.5566101694915254e-06, |
|
"loss": 0.3571, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.5559322033898304e-06, |
|
"loss": 0.4733, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.5552542372881355e-06, |
|
"loss": 0.3868, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.5545762711864405e-06, |
|
"loss": 0.376, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.5538983050847456e-06, |
|
"loss": 0.3649, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.5532203389830507e-06, |
|
"loss": 0.2754, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.5525423728813557e-06, |
|
"loss": 0.2406, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.551864406779661e-06, |
|
"loss": 0.2395, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.551186440677966e-06, |
|
"loss": 0.2258, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.550508474576271e-06, |
|
"loss": 0.1468, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.5498305084745761e-06, |
|
"loss": 0.2688, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.5491525423728812e-06, |
|
"loss": 0.2329, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.5484745762711865e-06, |
|
"loss": 0.1794, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.5477966101694915e-06, |
|
"loss": 0.2069, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.5471186440677966e-06, |
|
"loss": 0.3916, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.5464406779661016e-06, |
|
"loss": 0.1633, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.5457627118644067e-06, |
|
"loss": 0.4608, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 1.545084745762712e-06, |
|
"loss": 0.3149, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 1.544406779661017e-06, |
|
"loss": 0.3074, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.543728813559322e-06, |
|
"loss": 0.1666, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 1.5430508474576271e-06, |
|
"loss": 0.235, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 1.5423728813559322e-06, |
|
"loss": 0.3463, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 1.541694915254237e-06, |
|
"loss": 0.151, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 1.5410169491525423e-06, |
|
"loss": 0.1855, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 1.5403389830508473e-06, |
|
"loss": 0.2876, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 1.5396610169491524e-06, |
|
"loss": 0.2788, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.5389830508474574e-06, |
|
"loss": 0.2637, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.5383050847457625e-06, |
|
"loss": 0.2103, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 1.5376271186440678e-06, |
|
"loss": 0.1924, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 1.5369491525423728e-06, |
|
"loss": 0.2806, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 1.5362711864406779e-06, |
|
"loss": 0.3203, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.535593220338983e-06, |
|
"loss": 0.2389, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.534915254237288e-06, |
|
"loss": 0.3997, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 1.5342372881355933e-06, |
|
"loss": 0.4916, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.5335593220338983e-06, |
|
"loss": 0.3362, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.5328813559322034e-06, |
|
"loss": 0.2262, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 1.5322033898305084e-06, |
|
"loss": 0.1974, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 1.5315254237288135e-06, |
|
"loss": 0.2708, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 1.5308474576271187e-06, |
|
"loss": 0.141, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 1.5301694915254238e-06, |
|
"loss": 0.1588, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 1.5294915254237289e-06, |
|
"loss": 0.3345, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 1.528813559322034e-06, |
|
"loss": 0.2939, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 1.5281355932203388e-06, |
|
"loss": 0.2862, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.5274576271186438e-06, |
|
"loss": 0.1724, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 1.526779661016949e-06, |
|
"loss": 0.2035, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 1.5261016949152541e-06, |
|
"loss": 0.2464, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 1.5254237288135592e-06, |
|
"loss": 0.3118, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.9233333333333333, |
|
"eval_loss": 0.24194158613681793, |
|
"eval_runtime": 2.1646, |
|
"eval_samples_per_second": 554.376, |
|
"eval_steps_per_second": 4.62, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 1.5247457627118642e-06, |
|
"loss": 0.3308, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 1.5240677966101693e-06, |
|
"loss": 0.2512, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 1.5233898305084746e-06, |
|
"loss": 0.1865, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 1.5227118644067796e-06, |
|
"loss": 0.3534, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 1.5220338983050847e-06, |
|
"loss": 0.1939, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 1.5213559322033897e-06, |
|
"loss": 0.2233, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 1.5206779661016948e-06, |
|
"loss": 0.2171, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 1.5199999999999998e-06, |
|
"loss": 0.1356, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 1.5193220338983051e-06, |
|
"loss": 0.409, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 1.5186440677966102e-06, |
|
"loss": 0.2212, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 1.5179661016949152e-06, |
|
"loss": 0.2672, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 1.5172881355932203e-06, |
|
"loss": 0.4102, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 1.5166101694915253e-06, |
|
"loss": 0.2248, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 1.5159322033898306e-06, |
|
"loss": 0.2012, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 1.5152542372881357e-06, |
|
"loss": 0.2082, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 1.5145762711864407e-06, |
|
"loss": 0.3749, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 1.5138983050847456e-06, |
|
"loss": 0.2099, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 1.5132203389830506e-06, |
|
"loss": 0.2483, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 1.5125423728813559e-06, |
|
"loss": 0.1525, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 1.511864406779661e-06, |
|
"loss": 0.1175, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 1.511186440677966e-06, |
|
"loss": 0.1789, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 1.510508474576271e-06, |
|
"loss": 0.152, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 1.509830508474576e-06, |
|
"loss": 0.1298, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 1.5091525423728812e-06, |
|
"loss": 0.2071, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 1.5084745762711864e-06, |
|
"loss": 0.1652, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 1.5077966101694915e-06, |
|
"loss": 0.3342, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 1.5071186440677965e-06, |
|
"loss": 0.22, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 1.5064406779661016e-06, |
|
"loss": 0.1648, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 1.5057627118644066e-06, |
|
"loss": 0.2909, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 1.505084745762712e-06, |
|
"loss": 0.1288, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 1.504406779661017e-06, |
|
"loss": 0.2381, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 1.503728813559322e-06, |
|
"loss": 0.1202, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 1.503050847457627e-06, |
|
"loss": 0.2136, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 1.5023728813559321e-06, |
|
"loss": 0.1663, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 1.5016949152542374e-06, |
|
"loss": 0.2027, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 1.5010169491525425e-06, |
|
"loss": 0.2011, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 1.5003389830508475e-06, |
|
"loss": 0.2096, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 1.4996610169491524e-06, |
|
"loss": 0.256, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 1.4989830508474574e-06, |
|
"loss": 0.309, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 1.4983050847457625e-06, |
|
"loss": 0.144, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 1.4976271186440677e-06, |
|
"loss": 0.1867, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 1.4969491525423728e-06, |
|
"loss": 0.2838, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 1.4962711864406778e-06, |
|
"loss": 0.2575, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 1.495593220338983e-06, |
|
"loss": 0.1512, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 1.494915254237288e-06, |
|
"loss": 0.1375, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 1.4942372881355932e-06, |
|
"loss": 0.1081, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 1.4935593220338983e-06, |
|
"loss": 0.2312, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 1.4928813559322033e-06, |
|
"loss": 0.0993, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 1.4922033898305084e-06, |
|
"loss": 0.1649, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 1.4915254237288134e-06, |
|
"loss": 0.182, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 1.4908474576271187e-06, |
|
"loss": 0.5477, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 1.4901694915254238e-06, |
|
"loss": 0.0945, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 1.4894915254237288e-06, |
|
"loss": 0.1891, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 1.4888135593220339e-06, |
|
"loss": 0.1851, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 1.488135593220339e-06, |
|
"loss": 0.1563, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 1.487457627118644e-06, |
|
"loss": 0.2035, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 1.4867796610169493e-06, |
|
"loss": 0.0855, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 1.4861016949152543e-06, |
|
"loss": 0.2168, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 1.4854237288135592e-06, |
|
"loss": 0.1785, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 1.4847457627118642e-06, |
|
"loss": 0.1845, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 1.4840677966101693e-06, |
|
"loss": 0.2782, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 1.4833898305084745e-06, |
|
"loss": 0.1818, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 1.4827118644067796e-06, |
|
"loss": 0.1847, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 1.4820338983050846e-06, |
|
"loss": 0.5401, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 1.4813559322033897e-06, |
|
"loss": 0.2525, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 1.4806779661016948e-06, |
|
"loss": 0.311, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 1.48e-06, |
|
"loss": 0.1483, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 1.479322033898305e-06, |
|
"loss": 0.1421, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 1.4786440677966101e-06, |
|
"loss": 0.26, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 1.4779661016949152e-06, |
|
"loss": 0.3372, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 1.4772881355932202e-06, |
|
"loss": 0.242, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 1.4766101694915253e-06, |
|
"loss": 0.0846, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 1.4759322033898306e-06, |
|
"loss": 0.1841, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 1.4752542372881356e-06, |
|
"loss": 0.3118, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 1.4745762711864407e-06, |
|
"loss": 0.1246, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 1.4738983050847457e-06, |
|
"loss": 0.1171, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 1.4732203389830508e-06, |
|
"loss": 0.1163, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 1.472542372881356e-06, |
|
"loss": 0.1439, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 1.471864406779661e-06, |
|
"loss": 0.3402, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 1.471186440677966e-06, |
|
"loss": 0.2218, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 1.470508474576271e-06, |
|
"loss": 0.0947, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 1.469830508474576e-06, |
|
"loss": 0.1727, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 1.4691525423728811e-06, |
|
"loss": 0.2279, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 1.4684745762711864e-06, |
|
"loss": 0.1859, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 1.4677966101694914e-06, |
|
"loss": 0.3139, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 1.4671186440677965e-06, |
|
"loss": 0.1925, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 1.4664406779661015e-06, |
|
"loss": 0.2804, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 1.4657627118644066e-06, |
|
"loss": 0.2243, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 1.4650847457627119e-06, |
|
"loss": 0.1766, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 1.464406779661017e-06, |
|
"loss": 0.1647, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 1.463728813559322e-06, |
|
"loss": 0.329, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 1.463050847457627e-06, |
|
"loss": 0.1023, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 1.462372881355932e-06, |
|
"loss": 0.1247, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 1.4616949152542374e-06, |
|
"loss": 0.1611, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 1.4610169491525424e-06, |
|
"loss": 0.1313, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 1.4603389830508475e-06, |
|
"loss": 0.1791, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 1.4596610169491525e-06, |
|
"loss": 0.1911, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 1.4589830508474576e-06, |
|
"loss": 0.1199, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 1.4583050847457628e-06, |
|
"loss": 0.2245, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 1.4576271186440677e-06, |
|
"loss": 0.1106, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 1.4569491525423727e-06, |
|
"loss": 0.1827, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 1.4562711864406778e-06, |
|
"loss": 0.2192, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 1.4555932203389829e-06, |
|
"loss": 0.2889, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 1.454915254237288e-06, |
|
"loss": 0.2722, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 1.4542372881355932e-06, |
|
"loss": 0.222, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 1.4535593220338982e-06, |
|
"loss": 0.1417, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 1.4528813559322033e-06, |
|
"loss": 0.3165, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 1.4522033898305083e-06, |
|
"loss": 0.1703, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 1.4515254237288134e-06, |
|
"loss": 0.1312, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 1.4508474576271187e-06, |
|
"loss": 0.1586, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 1.4501694915254237e-06, |
|
"loss": 0.3892, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 1.4494915254237288e-06, |
|
"loss": 0.2609, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 1.4488135593220338e-06, |
|
"loss": 0.1844, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 1.4481355932203389e-06, |
|
"loss": 0.2793, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 1.4474576271186442e-06, |
|
"loss": 0.2283, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 1.4467796610169492e-06, |
|
"loss": 0.2165, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 1.4461016949152543e-06, |
|
"loss": 0.2505, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 1.4454237288135593e-06, |
|
"loss": 0.1226, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 1.4447457627118644e-06, |
|
"loss": 0.2771, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 1.4440677966101694e-06, |
|
"loss": 0.1191, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 1.4433898305084745e-06, |
|
"loss": 0.2561, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 1.4427118644067795e-06, |
|
"loss": 0.2066, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 1.4420338983050846e-06, |
|
"loss": 0.2358, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 1.4413559322033897e-06, |
|
"loss": 0.1005, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 1.4406779661016947e-06, |
|
"loss": 0.0813, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 1.44e-06, |
|
"loss": 0.1356, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 1.439322033898305e-06, |
|
"loss": 0.1793, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 1.43864406779661e-06, |
|
"loss": 0.116, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 1.4379661016949151e-06, |
|
"loss": 0.3809, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 1.4372881355932202e-06, |
|
"loss": 0.3438, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 1.4366101694915253e-06, |
|
"loss": 0.1055, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 1.4359322033898305e-06, |
|
"loss": 0.1812, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 1.4352542372881356e-06, |
|
"loss": 0.2359, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 1.4345762711864406e-06, |
|
"loss": 0.2917, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 1.4338983050847457e-06, |
|
"loss": 0.188, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 1.4332203389830507e-06, |
|
"loss": 0.2786, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 1.432542372881356e-06, |
|
"loss": 0.272, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 1.431864406779661e-06, |
|
"loss": 0.2009, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 1.4311864406779661e-06, |
|
"loss": 0.1849, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 1.4305084745762712e-06, |
|
"loss": 0.2114, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 1.4298305084745762e-06, |
|
"loss": 0.198, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 1.4291525423728813e-06, |
|
"loss": 0.2431, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 1.4284745762711863e-06, |
|
"loss": 0.2313, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 1.4277966101694914e-06, |
|
"loss": 0.4397, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 1.4271186440677965e-06, |
|
"loss": 0.2812, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 1.4264406779661015e-06, |
|
"loss": 0.3551, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 1.4257627118644066e-06, |
|
"loss": 0.2413, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 1.4250847457627118e-06, |
|
"loss": 0.1721, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 1.4244067796610169e-06, |
|
"loss": 0.228, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 1.423728813559322e-06, |
|
"loss": 0.2364, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.9275, |
|
"eval_loss": 0.20115427672863007, |
|
"eval_runtime": 2.1625, |
|
"eval_samples_per_second": 554.901, |
|
"eval_steps_per_second": 4.624, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 1.423050847457627e-06, |
|
"loss": 0.0924, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 1.422372881355932e-06, |
|
"loss": 0.3558, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 1.4216949152542373e-06, |
|
"loss": 0.1723, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 1.4210169491525424e-06, |
|
"loss": 0.199, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 1.4203389830508474e-06, |
|
"loss": 0.2078, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 1.4196610169491525e-06, |
|
"loss": 0.0834, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 1.4189830508474575e-06, |
|
"loss": 0.1201, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 1.4183050847457628e-06, |
|
"loss": 0.1316, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 1.4176271186440679e-06, |
|
"loss": 0.1494, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 1.416949152542373e-06, |
|
"loss": 0.0932, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 1.416271186440678e-06, |
|
"loss": 0.2139, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 1.415593220338983e-06, |
|
"loss": 0.1086, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 1.4149152542372879e-06, |
|
"loss": 0.2021, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 1.4142372881355931e-06, |
|
"loss": 0.1413, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 1.4135593220338982e-06, |
|
"loss": 0.1648, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 1.4128813559322032e-06, |
|
"loss": 0.1543, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 1.4122033898305083e-06, |
|
"loss": 0.1439, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 1.4115254237288134e-06, |
|
"loss": 0.0855, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 1.4108474576271186e-06, |
|
"loss": 0.1591, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 1.4101694915254237e-06, |
|
"loss": 0.1353, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 1.4094915254237287e-06, |
|
"loss": 0.1186, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 1.4088135593220338e-06, |
|
"loss": 0.33, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 1.4081355932203388e-06, |
|
"loss": 0.1859, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 1.4074576271186441e-06, |
|
"loss": 0.2789, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 1.4067796610169492e-06, |
|
"loss": 0.1976, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 1.4061016949152542e-06, |
|
"loss": 0.2264, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 1.4054237288135593e-06, |
|
"loss": 0.1494, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 1.4047457627118643e-06, |
|
"loss": 0.2535, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 1.4040677966101694e-06, |
|
"loss": 0.1734, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 1.4033898305084747e-06, |
|
"loss": 0.0785, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 1.4027118644067797e-06, |
|
"loss": 0.1786, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 1.4020338983050848e-06, |
|
"loss": 0.1981, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 1.4013559322033896e-06, |
|
"loss": 0.3671, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 1.4006779661016947e-06, |
|
"loss": 0.133, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 1.4e-06, |
|
"loss": 0.1537, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 1.399322033898305e-06, |
|
"loss": 0.2066, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 1.39864406779661e-06, |
|
"loss": 0.197, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 1.397966101694915e-06, |
|
"loss": 0.1113, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 1.3972881355932202e-06, |
|
"loss": 0.196, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 1.3966101694915254e-06, |
|
"loss": 0.16, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 1.3959322033898305e-06, |
|
"loss": 0.3039, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 1.3952542372881355e-06, |
|
"loss": 0.3009, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 1.3945762711864406e-06, |
|
"loss": 0.2367, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 1.3938983050847456e-06, |
|
"loss": 0.1706, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 1.3932203389830507e-06, |
|
"loss": 0.0864, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 1.392542372881356e-06, |
|
"loss": 0.1151, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 1.391864406779661e-06, |
|
"loss": 0.1147, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 1.391186440677966e-06, |
|
"loss": 0.1549, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 1.3905084745762711e-06, |
|
"loss": 0.1189, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 1.3898305084745762e-06, |
|
"loss": 0.1248, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 1.3891525423728815e-06, |
|
"loss": 0.155, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 1.3884745762711865e-06, |
|
"loss": 0.1817, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 1.3877966101694916e-06, |
|
"loss": 0.076, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 1.3871186440677964e-06, |
|
"loss": 0.1318, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 1.3864406779661015e-06, |
|
"loss": 0.2379, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 1.3857627118644067e-06, |
|
"loss": 0.1208, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 1.3850847457627118e-06, |
|
"loss": 0.2364, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 1.3844067796610168e-06, |
|
"loss": 0.3038, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 1.383728813559322e-06, |
|
"loss": 0.1233, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 1.383050847457627e-06, |
|
"loss": 0.1563, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 1.382372881355932e-06, |
|
"loss": 0.3739, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 1.3816949152542373e-06, |
|
"loss": 0.1857, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 1.3810169491525423e-06, |
|
"loss": 0.0931, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 1.3803389830508474e-06, |
|
"loss": 0.3036, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 1.3796610169491524e-06, |
|
"loss": 0.1429, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 1.3789830508474575e-06, |
|
"loss": 0.3071, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 1.3783050847457628e-06, |
|
"loss": 0.2532, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 1.3776271186440678e-06, |
|
"loss": 0.05, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 1.3769491525423729e-06, |
|
"loss": 0.2778, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.376271186440678e-06, |
|
"loss": 0.2016, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.375593220338983e-06, |
|
"loss": 0.1147, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 1.3749152542372883e-06, |
|
"loss": 0.1774, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.3742372881355933e-06, |
|
"loss": 0.2507, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.3735593220338984e-06, |
|
"loss": 0.1127, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 1.3728813559322032e-06, |
|
"loss": 0.1885, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 1.3722033898305083e-06, |
|
"loss": 0.2006, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 1.3715254237288133e-06, |
|
"loss": 0.1305, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 1.3708474576271186e-06, |
|
"loss": 0.1185, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 1.3701694915254236e-06, |
|
"loss": 0.1554, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 1.3694915254237287e-06, |
|
"loss": 0.1233, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 1.3688135593220338e-06, |
|
"loss": 0.2261, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 1.3681355932203388e-06, |
|
"loss": 0.1937, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 1.367457627118644e-06, |
|
"loss": 0.2992, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.3667796610169491e-06, |
|
"loss": 0.099, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 1.3661016949152542e-06, |
|
"loss": 0.2058, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 1.3654237288135592e-06, |
|
"loss": 0.1922, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 1.3647457627118643e-06, |
|
"loss": 0.2177, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 1.3640677966101696e-06, |
|
"loss": 0.2719, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 1.3633898305084746e-06, |
|
"loss": 0.1461, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 1.3627118644067797e-06, |
|
"loss": 0.0626, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 1.3620338983050847e-06, |
|
"loss": 0.2347, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 1.3613559322033898e-06, |
|
"loss": 0.1569, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 1.3606779661016948e-06, |
|
"loss": 0.0765, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 1.3600000000000001e-06, |
|
"loss": 0.1408, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 1.3593220338983052e-06, |
|
"loss": 0.1158, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 1.35864406779661e-06, |
|
"loss": 0.1584, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 1.357966101694915e-06, |
|
"loss": 0.125, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 1.3572881355932201e-06, |
|
"loss": 0.2532, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 1.3566101694915254e-06, |
|
"loss": 0.2595, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 1.3559322033898304e-06, |
|
"loss": 0.2905, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 1.3552542372881355e-06, |
|
"loss": 0.2109, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 1.3545762711864406e-06, |
|
"loss": 0.2787, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.3538983050847456e-06, |
|
"loss": 0.1537, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.3532203389830509e-06, |
|
"loss": 0.0606, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 1.352542372881356e-06, |
|
"loss": 0.1591, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.351864406779661e-06, |
|
"loss": 0.3794, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.351186440677966e-06, |
|
"loss": 0.1112, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 1.350508474576271e-06, |
|
"loss": 0.099, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 1.3498305084745761e-06, |
|
"loss": 0.2185, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 1.3491525423728814e-06, |
|
"loss": 0.1865, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 1.3484745762711865e-06, |
|
"loss": 0.1684, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 1.3477966101694915e-06, |
|
"loss": 0.1465, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 1.3471186440677966e-06, |
|
"loss": 0.3698, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.3464406779661016e-06, |
|
"loss": 0.0668, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 1.345762711864407e-06, |
|
"loss": 0.0842, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 1.345084745762712e-06, |
|
"loss": 0.058, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 1.3444067796610168e-06, |
|
"loss": 0.2392, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 1.3437288135593219e-06, |
|
"loss": 0.1301, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 1.343050847457627e-06, |
|
"loss": 0.0994, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 1.342372881355932e-06, |
|
"loss": 0.1115, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.3416949152542372e-06, |
|
"loss": 0.2144, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.3410169491525423e-06, |
|
"loss": 0.1689, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 1.3403389830508473e-06, |
|
"loss": 0.0722, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.3396610169491524e-06, |
|
"loss": 0.1058, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.3389830508474575e-06, |
|
"loss": 0.2104, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 1.3383050847457627e-06, |
|
"loss": 0.1704, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.3376271186440678e-06, |
|
"loss": 0.3156, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.3369491525423728e-06, |
|
"loss": 0.1713, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 1.3362711864406779e-06, |
|
"loss": 0.0866, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 1.335593220338983e-06, |
|
"loss": 0.1016, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 1.3349152542372882e-06, |
|
"loss": 0.1027, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 1.3342372881355933e-06, |
|
"loss": 0.236, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 1.3335593220338983e-06, |
|
"loss": 0.1598, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 1.3328813559322034e-06, |
|
"loss": 0.227, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 1.3322033898305084e-06, |
|
"loss": 0.1154, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 1.3315254237288137e-06, |
|
"loss": 0.1596, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 1.3308474576271185e-06, |
|
"loss": 0.2688, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 1.3301694915254236e-06, |
|
"loss": 0.1368, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 1.3294915254237287e-06, |
|
"loss": 0.2205, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 1.3288135593220337e-06, |
|
"loss": 0.2239, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 1.3281355932203388e-06, |
|
"loss": 0.1772, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 1.327457627118644e-06, |
|
"loss": 0.0688, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 1.326779661016949e-06, |
|
"loss": 0.1931, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 1.3261016949152541e-06, |
|
"loss": 0.049, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 1.3254237288135592e-06, |
|
"loss": 0.2912, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 1.3247457627118643e-06, |
|
"loss": 0.151, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 1.3240677966101695e-06, |
|
"loss": 0.0761, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 1.3233898305084746e-06, |
|
"loss": 0.1741, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 1.3227118644067796e-06, |
|
"loss": 0.249, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 1.3220338983050847e-06, |
|
"loss": 0.1217, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.9391666666666667, |
|
"eval_loss": 0.17867614328861237, |
|
"eval_runtime": 2.1693, |
|
"eval_samples_per_second": 553.168, |
|
"eval_steps_per_second": 4.61, |
|
"step": 1050 |
|
}, |
|
{ "epoch": 7.01, "learning_rate": 1.3213559322033897e-06, "loss": 0.1033, "step": 1051 },
{ "epoch": 7.01, "learning_rate": 1.320677966101695e-06, "loss": 0.1672, "step": 1052 },
{ "epoch": 7.02, "learning_rate": 1.32e-06, "loss": 0.097, "step": 1053 },
{ "epoch": 7.03, "learning_rate": 1.3193220338983051e-06, "loss": 0.1527, "step": 1054 },
{ "epoch": 7.03, "learning_rate": 1.3186440677966102e-06, "loss": 0.0696, "step": 1055 },
{ "epoch": 7.04, "learning_rate": 1.3179661016949152e-06, "loss": 0.0536, "step": 1056 },
{ "epoch": 7.05, "learning_rate": 1.3172881355932203e-06, "loss": 0.1639, "step": 1057 },
{ "epoch": 7.05, "learning_rate": 1.3166101694915253e-06, "loss": 0.2985, "step": 1058 },
{ "epoch": 7.06, "learning_rate": 1.3159322033898304e-06, "loss": 0.1313, "step": 1059 },
{ "epoch": 7.07, "learning_rate": 1.3152542372881355e-06, "loss": 0.1431, "step": 1060 },
{ "epoch": 7.07, "learning_rate": 1.3145762711864405e-06, "loss": 0.0903, "step": 1061 },
{ "epoch": 7.08, "learning_rate": 1.3138983050847456e-06, "loss": 0.0682, "step": 1062 },
{ "epoch": 7.09, "learning_rate": 1.3132203389830508e-06, "loss": 0.155, "step": 1063 },
{ "epoch": 7.09, "learning_rate": 1.3125423728813559e-06, "loss": 0.1242, "step": 1064 },
{ "epoch": 7.1, "learning_rate": 1.311864406779661e-06, "loss": 0.1489, "step": 1065 },
{ "epoch": 7.11, "learning_rate": 1.311186440677966e-06, "loss": 0.1878, "step": 1066 },
{ "epoch": 7.11, "learning_rate": 1.310508474576271e-06, "loss": 0.1566, "step": 1067 },
{ "epoch": 7.12, "learning_rate": 1.3098305084745761e-06, "loss": 0.0949, "step": 1068 },
{ "epoch": 7.13, "learning_rate": 1.3091525423728814e-06, "loss": 0.1652, "step": 1069 },
{ "epoch": 7.13, "learning_rate": 1.3084745762711864e-06, "loss": 0.2099, "step": 1070 },
{ "epoch": 7.14, "learning_rate": 1.3077966101694915e-06, "loss": 0.0429, "step": 1071 },
{ "epoch": 7.15, "learning_rate": 1.3071186440677965e-06, "loss": 0.1763, "step": 1072 },
{ "epoch": 7.15, "learning_rate": 1.3064406779661016e-06, "loss": 0.1139, "step": 1073 },
{ "epoch": 7.16, "learning_rate": 1.3057627118644069e-06, "loss": 0.2101, "step": 1074 },
{ "epoch": 7.17, "learning_rate": 1.305084745762712e-06, "loss": 0.1314, "step": 1075 },
{ "epoch": 7.17, "learning_rate": 1.304406779661017e-06, "loss": 0.0805, "step": 1076 },
{ "epoch": 7.18, "learning_rate": 1.303728813559322e-06, "loss": 0.0727, "step": 1077 },
{ "epoch": 7.19, "learning_rate": 1.303050847457627e-06, "loss": 0.0858, "step": 1078 },
{ "epoch": 7.19, "learning_rate": 1.3023728813559321e-06, "loss": 0.2719, "step": 1079 },
{ "epoch": 7.2, "learning_rate": 1.3016949152542372e-06, "loss": 0.1048, "step": 1080 },
{ "epoch": 7.21, "learning_rate": 1.3010169491525423e-06, "loss": 0.0948, "step": 1081 },
{ "epoch": 7.21, "learning_rate": 1.3003389830508473e-06, "loss": 0.182, "step": 1082 },
{ "epoch": 7.22, "learning_rate": 1.2996610169491524e-06, "loss": 0.222, "step": 1083 },
{ "epoch": 7.23, "learning_rate": 1.2989830508474574e-06, "loss": 0.1353, "step": 1084 },
{ "epoch": 7.23, "learning_rate": 1.2983050847457627e-06, "loss": 0.0588, "step": 1085 },
{ "epoch": 7.24, "learning_rate": 1.2976271186440677e-06, "loss": 0.1803, "step": 1086 },
{ "epoch": 7.25, "learning_rate": 1.2969491525423728e-06, "loss": 0.27, "step": 1087 },
{ "epoch": 7.25, "learning_rate": 1.2962711864406779e-06, "loss": 0.1085, "step": 1088 },
{ "epoch": 7.26, "learning_rate": 1.295593220338983e-06, "loss": 0.062, "step": 1089 },
{ "epoch": 7.27, "learning_rate": 1.2949152542372882e-06, "loss": 0.0562, "step": 1090 },
{ "epoch": 7.27, "learning_rate": 1.2942372881355932e-06, "loss": 0.099, "step": 1091 },
{ "epoch": 7.28, "learning_rate": 1.2935593220338983e-06, "loss": 0.2152, "step": 1092 },
{ "epoch": 7.29, "learning_rate": 1.2928813559322033e-06, "loss": 0.3026, "step": 1093 },
{ "epoch": 7.29, "learning_rate": 1.2922033898305084e-06, "loss": 0.1544, "step": 1094 },
{ "epoch": 7.3, "learning_rate": 1.2915254237288137e-06, "loss": 0.1499, "step": 1095 },
{ "epoch": 7.31, "learning_rate": 1.2908474576271187e-06, "loss": 0.076, "step": 1096 },
{ "epoch": 7.31, "learning_rate": 1.2901694915254238e-06, "loss": 0.0523, "step": 1097 },
{ "epoch": 7.32, "learning_rate": 1.2894915254237288e-06, "loss": 0.1224, "step": 1098 },
{ "epoch": 7.33, "learning_rate": 1.2888135593220339e-06, "loss": 0.1076, "step": 1099 },
{ "epoch": 7.33, "learning_rate": 1.2881355932203387e-06, "loss": 0.1661, "step": 1100 },
{ "epoch": 7.34, "learning_rate": 1.287457627118644e-06, "loss": 0.102, "step": 1101 },
{ "epoch": 7.35, "learning_rate": 1.286779661016949e-06, "loss": 0.3861, "step": 1102 },
{ "epoch": 7.35, "learning_rate": 1.286101694915254e-06, "loss": 0.1388, "step": 1103 },
{ "epoch": 7.36, "learning_rate": 1.2854237288135592e-06, "loss": 0.0618, "step": 1104 },
{ "epoch": 7.37, "learning_rate": 1.2847457627118642e-06, "loss": 0.071, "step": 1105 },
{ "epoch": 7.37, "learning_rate": 1.2840677966101695e-06, "loss": 0.2051, "step": 1106 },
{ "epoch": 7.38, "learning_rate": 1.2833898305084745e-06, "loss": 0.0579, "step": 1107 },
{ "epoch": 7.39, "learning_rate": 1.2827118644067796e-06, "loss": 0.1756, "step": 1108 },
{ "epoch": 7.39, "learning_rate": 1.2820338983050846e-06, "loss": 0.2274, "step": 1109 },
{ "epoch": 7.4, "learning_rate": 1.2813559322033897e-06, "loss": 0.267, "step": 1110 },
{ "epoch": 7.41, "learning_rate": 1.280677966101695e-06, "loss": 0.0814, "step": 1111 },
{ "epoch": 7.41, "learning_rate": 1.28e-06, "loss": 0.1732, "step": 1112 },
{ "epoch": 7.42, "learning_rate": 1.279322033898305e-06, "loss": 0.0912, "step": 1113 },
{ "epoch": 7.43, "learning_rate": 1.2786440677966101e-06, "loss": 0.2504, "step": 1114 },
{ "epoch": 7.43, "learning_rate": 1.2779661016949152e-06, "loss": 0.2948, "step": 1115 },
{ "epoch": 7.44, "learning_rate": 1.2772881355932202e-06, "loss": 0.1412, "step": 1116 },
{ "epoch": 7.45, "learning_rate": 1.2766101694915255e-06, "loss": 0.2982, "step": 1117 },
{ "epoch": 7.45, "learning_rate": 1.2759322033898306e-06, "loss": 0.0817, "step": 1118 },
{ "epoch": 7.46, "learning_rate": 1.2752542372881356e-06, "loss": 0.1372, "step": 1119 },
{ "epoch": 7.47, "learning_rate": 1.2745762711864405e-06, "loss": 0.2141, "step": 1120 },
{ "epoch": 7.47, "learning_rate": 1.2738983050847455e-06, "loss": 0.2414, "step": 1121 },
{ "epoch": 7.48, "learning_rate": 1.2732203389830508e-06, "loss": 0.0736, "step": 1122 },
{ "epoch": 7.49, "learning_rate": 1.2725423728813558e-06, "loss": 0.0489, "step": 1123 },
{ "epoch": 7.49, "learning_rate": 1.271864406779661e-06, "loss": 0.2414, "step": 1124 },
{ "epoch": 7.5, "learning_rate": 1.271186440677966e-06, "loss": 0.0823, "step": 1125 },
{ "epoch": 7.51, "learning_rate": 1.270508474576271e-06, "loss": 0.1825, "step": 1126 },
{ "epoch": 7.51, "learning_rate": 1.2698305084745763e-06, "loss": 0.0772, "step": 1127 },
{ "epoch": 7.52, "learning_rate": 1.2691525423728813e-06, "loss": 0.0974, "step": 1128 },
{ "epoch": 7.53, "learning_rate": 1.2684745762711864e-06, "loss": 0.1226, "step": 1129 },
{ "epoch": 7.53, "learning_rate": 1.2677966101694914e-06, "loss": 0.115, "step": 1130 },
{ "epoch": 7.54, "learning_rate": 1.2671186440677965e-06, "loss": 0.0753, "step": 1131 },
{ "epoch": 7.55, "learning_rate": 1.2664406779661016e-06, "loss": 0.0742, "step": 1132 },
{ "epoch": 7.55, "learning_rate": 1.2657627118644068e-06, "loss": 0.0477, "step": 1133 },
{ "epoch": 7.56, "learning_rate": 1.2650847457627119e-06, "loss": 0.0739, "step": 1134 },
{ "epoch": 7.57, "learning_rate": 1.264406779661017e-06, "loss": 0.0984, "step": 1135 },
{ "epoch": 7.57, "learning_rate": 1.263728813559322e-06, "loss": 0.1167, "step": 1136 },
{ "epoch": 7.58, "learning_rate": 1.263050847457627e-06, "loss": 0.1581, "step": 1137 },
{ "epoch": 7.59, "learning_rate": 1.2623728813559323e-06, "loss": 0.1199, "step": 1138 },
{ "epoch": 7.59, "learning_rate": 1.2616949152542374e-06, "loss": 0.0638, "step": 1139 },
{ "epoch": 7.6, "learning_rate": 1.2610169491525424e-06, "loss": 0.0984, "step": 1140 },
{ "epoch": 7.61, "learning_rate": 1.2603389830508473e-06, "loss": 0.1782, "step": 1141 },
{ "epoch": 7.61, "learning_rate": 1.2596610169491523e-06, "loss": 0.1572, "step": 1142 },
{ "epoch": 7.62, "learning_rate": 1.2589830508474576e-06, "loss": 0.2107, "step": 1143 },
{ "epoch": 7.63, "learning_rate": 1.2583050847457626e-06, "loss": 0.1769, "step": 1144 },
{ "epoch": 7.63, "learning_rate": 1.2576271186440677e-06, "loss": 0.0882, "step": 1145 },
{ "epoch": 7.64, "learning_rate": 1.2569491525423728e-06, "loss": 0.1487, "step": 1146 },
{ "epoch": 7.65, "learning_rate": 1.2562711864406778e-06, "loss": 0.0576, "step": 1147 },
{ "epoch": 7.65, "learning_rate": 1.2555932203389829e-06, "loss": 0.049, "step": 1148 },
{ "epoch": 7.66, "learning_rate": 1.2549152542372881e-06, "loss": 0.1626, "step": 1149 },
{ "epoch": 7.67, "learning_rate": 1.2542372881355932e-06, "loss": 0.1257, "step": 1150 },
{ "epoch": 7.67, "learning_rate": 1.2535593220338982e-06, "loss": 0.0854, "step": 1151 },
{ "epoch": 7.68, "learning_rate": 1.2528813559322033e-06, "loss": 0.097, "step": 1152 },
{ "epoch": 7.69, "learning_rate": 1.2522033898305084e-06, "loss": 0.1523, "step": 1153 },
{ "epoch": 7.69, "learning_rate": 1.2515254237288136e-06, "loss": 0.184, "step": 1154 },
{ "epoch": 7.7, "learning_rate": 1.2508474576271187e-06, "loss": 0.0491, "step": 1155 },
{ "epoch": 7.71, "learning_rate": 1.2501694915254237e-06, "loss": 0.3348, "step": 1156 },
{ "epoch": 7.71, "learning_rate": 1.2494915254237288e-06, "loss": 0.0907, "step": 1157 },
{ "epoch": 7.72, "learning_rate": 1.2488135593220338e-06, "loss": 0.2645, "step": 1158 },
{ "epoch": 7.73, "learning_rate": 1.2481355932203391e-06, "loss": 0.1606, "step": 1159 },
{ "epoch": 7.73, "learning_rate": 1.2474576271186442e-06, "loss": 0.1083, "step": 1160 },
{ "epoch": 7.74, "learning_rate": 1.2467796610169492e-06, "loss": 0.1732, "step": 1161 },
{ "epoch": 7.75, "learning_rate": 1.246101694915254e-06, "loss": 0.1342, "step": 1162 },
{ "epoch": 7.75, "learning_rate": 1.2454237288135591e-06, "loss": 0.0978, "step": 1163 },
{ "epoch": 7.76, "learning_rate": 1.2447457627118642e-06, "loss": 0.1686, "step": 1164 },
{ "epoch": 7.77, "learning_rate": 1.2440677966101694e-06, "loss": 0.0807, "step": 1165 },
{ "epoch": 7.77, "learning_rate": 1.2433898305084745e-06, "loss": 0.0859, "step": 1166 },
{ "epoch": 7.78, "learning_rate": 1.2427118644067796e-06, "loss": 0.0708, "step": 1167 },
{ "epoch": 7.79, "learning_rate": 1.2420338983050846e-06, "loss": 0.1128, "step": 1168 },
{ "epoch": 7.79, "learning_rate": 1.2413559322033897e-06, "loss": 0.209, "step": 1169 },
{ "epoch": 7.8, "learning_rate": 1.240677966101695e-06, "loss": 0.0853, "step": 1170 },
{ "epoch": 7.81, "learning_rate": 1.24e-06, "loss": 0.128, "step": 1171 },
{ "epoch": 7.81, "learning_rate": 1.239322033898305e-06, "loss": 0.0852, "step": 1172 },
{ "epoch": 7.82, "learning_rate": 1.23864406779661e-06, "loss": 0.0496, "step": 1173 },
{ "epoch": 7.83, "learning_rate": 1.2379661016949152e-06, "loss": 0.1112, "step": 1174 },
{ "epoch": 7.83, "learning_rate": 1.2372881355932204e-06, "loss": 0.083, "step": 1175 },
{ "epoch": 7.84, "learning_rate": 1.2366101694915255e-06, "loss": 0.086, "step": 1176 },
{ "epoch": 7.85, "learning_rate": 1.2359322033898305e-06, "loss": 0.1494, "step": 1177 },
{ "epoch": 7.85, "learning_rate": 1.2352542372881356e-06, "loss": 0.0834, "step": 1178 },
{ "epoch": 7.86, "learning_rate": 1.2345762711864406e-06, "loss": 0.228, "step": 1179 },
{ "epoch": 7.87, "learning_rate": 1.2338983050847457e-06, "loss": 0.237, "step": 1180 },
{ "epoch": 7.87, "learning_rate": 1.233220338983051e-06, "loss": 0.2149, "step": 1181 },
{ "epoch": 7.88, "learning_rate": 1.232542372881356e-06, "loss": 0.0522, "step": 1182 },
{ "epoch": 7.89, "learning_rate": 1.2318644067796609e-06, "loss": 0.2585, "step": 1183 },
{ "epoch": 7.89, "learning_rate": 1.231186440677966e-06, "loss": 0.0892, "step": 1184 },
{ "epoch": 7.9, "learning_rate": 1.230508474576271e-06, "loss": 0.1251, "step": 1185 },
{ "epoch": 7.91, "learning_rate": 1.2298305084745762e-06, "loss": 0.0866, "step": 1186 },
{ "epoch": 7.91, "learning_rate": 1.2291525423728813e-06, "loss": 0.2315, "step": 1187 },
{ "epoch": 7.92, "learning_rate": 1.2284745762711864e-06, "loss": 0.0673, "step": 1188 },
{ "epoch": 7.93, "learning_rate": 1.2277966101694914e-06, "loss": 0.1812, "step": 1189 },
{ "epoch": 7.93, "learning_rate": 1.2271186440677965e-06, "loss": 0.0626, "step": 1190 },
{ "epoch": 7.94, "learning_rate": 1.2264406779661015e-06, "loss": 0.1914, "step": 1191 },
{ "epoch": 7.95, "learning_rate": 1.2257627118644068e-06, "loss": 0.084, "step": 1192 },
{ "epoch": 7.95, "learning_rate": 1.2250847457627118e-06, "loss": 0.1848, "step": 1193 },
{ "epoch": 7.96, "learning_rate": 1.224406779661017e-06, "loss": 0.0629, "step": 1194 },
{ "epoch": 7.97, "learning_rate": 1.223728813559322e-06, "loss": 0.053, "step": 1195 },
{ "epoch": 7.97, "learning_rate": 1.223050847457627e-06, "loss": 0.1371, "step": 1196 },
{ "epoch": 7.98, "learning_rate": 1.2223728813559323e-06, "loss": 0.1618, "step": 1197 },
{ "epoch": 7.99, "learning_rate": 1.2216949152542373e-06, "loss": 0.0804, "step": 1198 },
{ "epoch": 7.99, "learning_rate": 1.2210169491525424e-06, "loss": 0.1701, "step": 1199 },
{ "epoch": 8.0, "learning_rate": 1.2203389830508474e-06, "loss": 0.2239, "step": 1200 },
{ "epoch": 8.0, "eval_accuracy": 0.9408333333333333, "eval_loss": 0.16648565232753754, "eval_runtime": 2.1629, "eval_samples_per_second": 554.823, "eval_steps_per_second": 4.624, "step": 1200 },
|
{ "epoch": 8.01, "learning_rate": 1.2196610169491525e-06, "loss": 0.0578, "step": 1201 },
{ "epoch": 8.01, "learning_rate": 1.2189830508474578e-06, "loss": 0.2605, "step": 1202 },
{ "epoch": 8.02, "learning_rate": 1.2183050847457626e-06, "loss": 0.0664, "step": 1203 },
{ "epoch": 8.03, "learning_rate": 1.2176271186440677e-06, "loss": 0.0592, "step": 1204 },
{ "epoch": 8.03, "learning_rate": 1.2169491525423727e-06, "loss": 0.2028, "step": 1205 },
{ "epoch": 8.04, "learning_rate": 1.2162711864406778e-06, "loss": 0.05, "step": 1206 },
{ "epoch": 8.05, "learning_rate": 1.2155932203389828e-06, "loss": 0.0889, "step": 1207 },
{ "epoch": 8.05, "learning_rate": 1.214915254237288e-06, "loss": 0.059, "step": 1208 },
{ "epoch": 8.06, "learning_rate": 1.2142372881355931e-06, "loss": 0.1146, "step": 1209 },
{ "epoch": 8.07, "learning_rate": 1.2135593220338982e-06, "loss": 0.0448, "step": 1210 },
{ "epoch": 8.07, "learning_rate": 1.2128813559322033e-06, "loss": 0.1012, "step": 1211 },
{ "epoch": 8.08, "learning_rate": 1.2122033898305083e-06, "loss": 0.1311, "step": 1212 },
{ "epoch": 8.09, "learning_rate": 1.2115254237288136e-06, "loss": 0.1254, "step": 1213 },
{ "epoch": 8.09, "learning_rate": 1.2108474576271186e-06, "loss": 0.2726, "step": 1214 },
{ "epoch": 8.1, "learning_rate": 1.2101694915254237e-06, "loss": 0.0499, "step": 1215 },
{ "epoch": 8.11, "learning_rate": 1.2094915254237287e-06, "loss": 0.1829, "step": 1216 },
{ "epoch": 8.11, "learning_rate": 1.2088135593220338e-06, "loss": 0.0689, "step": 1217 },
{ "epoch": 8.12, "learning_rate": 1.208135593220339e-06, "loss": 0.1835, "step": 1218 },
{ "epoch": 8.13, "learning_rate": 1.2074576271186441e-06, "loss": 0.1104, "step": 1219 },
{ "epoch": 8.13, "learning_rate": 1.2067796610169492e-06, "loss": 0.0912, "step": 1220 },
{ "epoch": 8.14, "learning_rate": 1.2061016949152542e-06, "loss": 0.1757, "step": 1221 },
{ "epoch": 8.15, "learning_rate": 1.2054237288135593e-06, "loss": 0.0958, "step": 1222 },
{ "epoch": 8.15, "learning_rate": 1.2047457627118646e-06, "loss": 0.1525, "step": 1223 },
{ "epoch": 8.16, "learning_rate": 1.2040677966101694e-06, "loss": 0.069, "step": 1224 },
{ "epoch": 8.17, "learning_rate": 1.2033898305084745e-06, "loss": 0.1472, "step": 1225 },
{ "epoch": 8.17, "learning_rate": 1.2027118644067795e-06, "loss": 0.1457, "step": 1226 },
{ "epoch": 8.18, "learning_rate": 1.2020338983050846e-06, "loss": 0.0579, "step": 1227 },
{ "epoch": 8.19, "learning_rate": 1.2013559322033896e-06, "loss": 0.049, "step": 1228 },
{ "epoch": 8.19, "learning_rate": 1.2006779661016949e-06, "loss": 0.2624, "step": 1229 },
{ "epoch": 8.2, "learning_rate": 1.2e-06, "loss": 0.2189, "step": 1230 },
{ "epoch": 8.21, "learning_rate": 1.199322033898305e-06, "loss": 0.0755, "step": 1231 },
{ "epoch": 8.21, "learning_rate": 1.19864406779661e-06, "loss": 0.1616, "step": 1232 },
{ "epoch": 8.22, "learning_rate": 1.1979661016949151e-06, "loss": 0.0491, "step": 1233 },
{ "epoch": 8.23, "learning_rate": 1.1972881355932204e-06, "loss": 0.1849, "step": 1234 },
{ "epoch": 8.23, "learning_rate": 1.1966101694915254e-06, "loss": 0.0913, "step": 1235 },
{ "epoch": 8.24, "learning_rate": 1.1959322033898305e-06, "loss": 0.12, "step": 1236 },
{ "epoch": 8.25, "learning_rate": 1.1952542372881355e-06, "loss": 0.0723, "step": 1237 },
{ "epoch": 8.25, "learning_rate": 1.1945762711864406e-06, "loss": 0.0674, "step": 1238 },
{ "epoch": 8.26, "learning_rate": 1.1938983050847457e-06, "loss": 0.0437, "step": 1239 },
{ "epoch": 8.27, "learning_rate": 1.193220338983051e-06, "loss": 0.0451, "step": 1240 },
{ "epoch": 8.27, "learning_rate": 1.192542372881356e-06, "loss": 0.0546, "step": 1241 },
{ "epoch": 8.28, "learning_rate": 1.191864406779661e-06, "loss": 0.1625, "step": 1242 },
{ "epoch": 8.29, "learning_rate": 1.191186440677966e-06, "loss": 0.0842, "step": 1243 },
{ "epoch": 8.29, "learning_rate": 1.1905084745762711e-06, "loss": 0.0743, "step": 1244 },
{ "epoch": 8.3, "learning_rate": 1.1898305084745762e-06, "loss": 0.1086, "step": 1245 },
{ "epoch": 8.31, "learning_rate": 1.1891525423728813e-06, "loss": 0.0691, "step": 1246 },
{ "epoch": 8.31, "learning_rate": 1.1884745762711863e-06, "loss": 0.0501, "step": 1247 },
{ "epoch": 8.32, "learning_rate": 1.1877966101694914e-06, "loss": 0.1493, "step": 1248 },
{ "epoch": 8.33, "learning_rate": 1.1871186440677964e-06, "loss": 0.1773, "step": 1249 },
{ "epoch": 8.33, "learning_rate": 1.1864406779661017e-06, "loss": 0.0371, "step": 1250 },
{ "epoch": 8.34, "learning_rate": 1.1857627118644067e-06, "loss": 0.0545, "step": 1251 },
{ "epoch": 8.35, "learning_rate": 1.1850847457627118e-06, "loss": 0.2342, "step": 1252 },
{ "epoch": 8.35, "learning_rate": 1.1844067796610169e-06, "loss": 0.0701, "step": 1253 },
{ "epoch": 8.36, "learning_rate": 1.183728813559322e-06, "loss": 0.0812, "step": 1254 },
{ "epoch": 8.37, "learning_rate": 1.183050847457627e-06, "loss": 0.0415, "step": 1255 },
{ "epoch": 8.37, "learning_rate": 1.1823728813559322e-06, "loss": 0.2034, "step": 1256 },
{ "epoch": 8.38, "learning_rate": 1.1816949152542373e-06, "loss": 0.1265, "step": 1257 },
{ "epoch": 8.39, "learning_rate": 1.1810169491525423e-06, "loss": 0.0876, "step": 1258 },
{ "epoch": 8.39, "learning_rate": 1.1803389830508474e-06, "loss": 0.111, "step": 1259 },
{ "epoch": 8.4, "learning_rate": 1.1796610169491525e-06, "loss": 0.0849, "step": 1260 },
{ "epoch": 8.41, "learning_rate": 1.1789830508474577e-06, "loss": 0.2333, "step": 1261 },
{ "epoch": 8.41, "learning_rate": 1.1783050847457628e-06, "loss": 0.0608, "step": 1262 },
{ "epoch": 8.42, "learning_rate": 1.1776271186440678e-06, "loss": 0.1015, "step": 1263 },
{ "epoch": 8.43, "learning_rate": 1.1769491525423729e-06, "loss": 0.0737, "step": 1264 },
{ "epoch": 8.43, "learning_rate": 1.176271186440678e-06, "loss": 0.1042, "step": 1265 },
{ "epoch": 8.44, "learning_rate": 1.175593220338983e-06, "loss": 0.0406, "step": 1266 },
{ "epoch": 8.45, "learning_rate": 1.174915254237288e-06, "loss": 0.1145, "step": 1267 },
{ "epoch": 8.45, "learning_rate": 1.1742372881355931e-06, "loss": 0.1783, "step": 1268 },
{ "epoch": 8.46, "learning_rate": 1.1735593220338982e-06, "loss": 0.0428, "step": 1269 },
{ "epoch": 8.47, "learning_rate": 1.1728813559322032e-06, "loss": 0.128, "step": 1270 },
{ "epoch": 8.47, "learning_rate": 1.1722033898305083e-06, "loss": 0.0431, "step": 1271 },
{ "epoch": 8.48, "learning_rate": 1.1715254237288135e-06, "loss": 0.3058, "step": 1272 },
{ "epoch": 8.49, "learning_rate": 1.1708474576271186e-06, "loss": 0.0724, "step": 1273 },
{ "epoch": 8.49, "learning_rate": 1.1701694915254237e-06, "loss": 0.1464, "step": 1274 },
{ "epoch": 8.5, "learning_rate": 1.1694915254237287e-06, "loss": 0.1469, "step": 1275 },
{ "epoch": 8.51, "learning_rate": 1.1688135593220338e-06, "loss": 0.2153, "step": 1276 },
{ "epoch": 8.51, "learning_rate": 1.168135593220339e-06, "loss": 0.1105, "step": 1277 },
{ "epoch": 8.52, "learning_rate": 1.167457627118644e-06, "loss": 0.0811, "step": 1278 },
{ "epoch": 8.53, "learning_rate": 1.1667796610169491e-06, "loss": 0.1187, "step": 1279 },
{ "epoch": 8.53, "learning_rate": 1.1661016949152542e-06, "loss": 0.1969, "step": 1280 },
{ "epoch": 8.54, "learning_rate": 1.1654237288135593e-06, "loss": 0.1847, "step": 1281 },
{ "epoch": 8.55, "learning_rate": 1.1647457627118645e-06, "loss": 0.1981, "step": 1282 },
{ "epoch": 8.55, "learning_rate": 1.1640677966101696e-06, "loss": 0.0518, "step": 1283 },
{ "epoch": 8.56, "learning_rate": 1.1633898305084746e-06, "loss": 0.1896, "step": 1284 },
{ "epoch": 8.57, "learning_rate": 1.1627118644067797e-06, "loss": 0.0507, "step": 1285 },
{ "epoch": 8.57, "learning_rate": 1.1620338983050847e-06, "loss": 0.0714, "step": 1286 },
{ "epoch": 8.58, "learning_rate": 1.1613559322033896e-06, "loss": 0.1415, "step": 1287 },
{ "epoch": 8.59, "learning_rate": 1.1606779661016948e-06, "loss": 0.0575, "step": 1288 },
{ "epoch": 8.59, "learning_rate": 1.16e-06, "loss": 0.0807, "step": 1289 },
{ "epoch": 8.6, "learning_rate": 1.159322033898305e-06, "loss": 0.0522, "step": 1290 },
{ "epoch": 8.61, "learning_rate": 1.15864406779661e-06, "loss": 0.3148, "step": 1291 },
{ "epoch": 8.61, "learning_rate": 1.157966101694915e-06, "loss": 0.1748, "step": 1292 },
{ "epoch": 8.62, "learning_rate": 1.1572881355932203e-06, "loss": 0.0825, "step": 1293 },
{ "epoch": 8.63, "learning_rate": 1.1566101694915254e-06, "loss": 0.2799, "step": 1294 },
{ "epoch": 8.63, "learning_rate": 1.1559322033898304e-06, "loss": 0.1794, "step": 1295 },
{ "epoch": 8.64, "learning_rate": 1.1552542372881355e-06, "loss": 0.1373, "step": 1296 },
{ "epoch": 8.65, "learning_rate": 1.1545762711864406e-06, "loss": 0.0965, "step": 1297 },
{ "epoch": 8.65, "learning_rate": 1.1538983050847458e-06, "loss": 0.2499, "step": 1298 },
{ "epoch": 8.66, "learning_rate": 1.1532203389830509e-06, "loss": 0.0718, "step": 1299 },
{ "epoch": 8.67, "learning_rate": 1.152542372881356e-06, "loss": 0.226, "step": 1300 },
{ "epoch": 8.67, "learning_rate": 1.151864406779661e-06, "loss": 0.0687, "step": 1301 },
{ "epoch": 8.68, "learning_rate": 1.151186440677966e-06, "loss": 0.1217, "step": 1302 },
{ "epoch": 8.69, "learning_rate": 1.150508474576271e-06, "loss": 0.0572, "step": 1303 },
{ "epoch": 8.69, "learning_rate": 1.1498305084745764e-06, "loss": 0.3177, "step": 1304 },
{ "epoch": 8.7, "learning_rate": 1.1491525423728814e-06, "loss": 0.0489, "step": 1305 },
{ "epoch": 8.71, "learning_rate": 1.1484745762711865e-06, "loss": 0.0623, "step": 1306 },
{ "epoch": 8.71, "learning_rate": 1.1477966101694913e-06, "loss": 0.0568, "step": 1307 },
{ "epoch": 8.72, "learning_rate": 1.1471186440677964e-06, "loss": 0.131, "step": 1308 },
{ "epoch": 8.73, "learning_rate": 1.1464406779661016e-06, "loss": 0.0687, "step": 1309 },
{ "epoch": 8.73, "learning_rate": 1.1457627118644067e-06, "loss": 0.1849, "step": 1310 },
{ "epoch": 8.74, "learning_rate": 1.1450847457627118e-06, "loss": 0.0848, "step": 1311 },
{ "epoch": 8.75, "learning_rate": 1.1444067796610168e-06, "loss": 0.1458, "step": 1312 },
{ "epoch": 8.75, "learning_rate": 1.1437288135593219e-06, "loss": 0.1349, "step": 1313 },
{ "epoch": 8.76, "learning_rate": 1.1430508474576271e-06, "loss": 0.0331, "step": 1314 },
{ "epoch": 8.77, "learning_rate": 1.1423728813559322e-06, "loss": 0.154, "step": 1315 },
{ "epoch": 8.77, "learning_rate": 1.1416949152542372e-06, "loss": 0.0355, "step": 1316 },
{ "epoch": 8.78, "learning_rate": 1.1410169491525423e-06, "loss": 0.0741, "step": 1317 },
{ "epoch": 8.79, "learning_rate": 1.1403389830508474e-06, "loss": 0.0332, "step": 1318 },
{ "epoch": 8.79, "learning_rate": 1.1396610169491524e-06, "loss": 0.0633, "step": 1319 },
{ "epoch": 8.8, "learning_rate": 1.1389830508474577e-06, "loss": 0.2599, "step": 1320 },
{ "epoch": 8.81, "learning_rate": 1.1383050847457627e-06, "loss": 0.1882, "step": 1321 },
{ "epoch": 8.81, "learning_rate": 1.1376271186440678e-06, "loss": 0.1333, "step": 1322 },
{ "epoch": 8.82, "learning_rate": 1.1369491525423728e-06, "loss": 0.1178, "step": 1323 },
{ "epoch": 8.83, "learning_rate": 1.136271186440678e-06, "loss": 0.2969, "step": 1324 },
{ "epoch": 8.83, "learning_rate": 1.1355932203389832e-06, "loss": 0.2684, "step": 1325 },
{ "epoch": 8.84, "learning_rate": 1.1349152542372882e-06, "loss": 0.0795, "step": 1326 },
{ "epoch": 8.85, "learning_rate": 1.1342372881355933e-06, "loss": 0.1673, "step": 1327 },
{ "epoch": 8.85, "learning_rate": 1.1335593220338981e-06, "loss": 0.1067, "step": 1328 },
{ "epoch": 8.86, "learning_rate": 1.1328813559322032e-06, "loss": 0.0918, "step": 1329 },
{ "epoch": 8.87, "learning_rate": 1.1322033898305082e-06, "loss": 0.1004, "step": 1330 },
{ "epoch": 8.87, "learning_rate": 1.1315254237288135e-06, "loss": 0.1313, "step": 1331 },
{ "epoch": 8.88, "learning_rate": 1.1308474576271186e-06, "loss": 0.055, "step": 1332 },
{ "epoch": 8.89, "learning_rate": 1.1301694915254236e-06, "loss": 0.1359, "step": 1333 },
{ "epoch": 8.89, "learning_rate": 1.1294915254237287e-06, "loss": 0.0309, "step": 1334 },
{ "epoch": 8.9, "learning_rate": 1.1288135593220337e-06, "loss": 0.0437, "step": 1335 },
{ "epoch": 8.91, "learning_rate": 1.128135593220339e-06, "loss": 0.1121, "step": 1336 },
{ "epoch": 8.91, "learning_rate": 1.127457627118644e-06, "loss": 0.0813, "step": 1337 },
{ "epoch": 8.92, "learning_rate": 1.126779661016949e-06, "loss": 0.0731, "step": 1338 },
{ "epoch": 8.93, "learning_rate": 1.1261016949152542e-06, "loss": 0.2019, "step": 1339 },
{ "epoch": 8.93, "learning_rate": 1.1254237288135592e-06, "loss": 0.4634, "step": 1340 },
{ "epoch": 8.94, "learning_rate": 1.1247457627118645e-06, "loss": 0.0817, "step": 1341 },
{ "epoch": 8.95, "learning_rate": 1.1240677966101695e-06, "loss": 0.1337, "step": 1342 },
{ "epoch": 8.95, "learning_rate": 1.1233898305084746e-06, "loss": 0.0505, "step": 1343 },
{ "epoch": 8.96, "learning_rate": 1.1227118644067796e-06, "loss": 0.1064, "step": 1344 },
{ "epoch": 8.97, "learning_rate": 1.1220338983050847e-06, "loss": 0.102, "step": 1345 },
{ "epoch": 8.97, "learning_rate": 1.12135593220339e-06, "loss": 0.0968, "step": 1346 },
{ "epoch": 8.98, "learning_rate": 1.120677966101695e-06, "loss": 0.0588, "step": 1347 },
{ "epoch": 8.99, "learning_rate": 1.12e-06, "loss": 0.1623, "step": 1348 },
{ "epoch": 8.99, "learning_rate": 1.119322033898305e-06, "loss": 0.0739, "step": 1349 },
{ "epoch": 9.0, "learning_rate": 1.11864406779661e-06, "loss": 0.1104, "step": 1350 },
{ "epoch": 9.0, "eval_accuracy": 0.9466666666666667, "eval_loss": 0.15664003789424896, "eval_runtime": 2.163, "eval_samples_per_second": 554.797, "eval_steps_per_second": 4.623, "step": 1350 },
|
{ "epoch": 9.01, "learning_rate": 1.117966101694915e-06, "loss": 0.1003, "step": 1351 },
{ "epoch": 9.01, "learning_rate": 1.1172881355932203e-06, "loss": 0.0391, "step": 1352 },
{ "epoch": 9.02, "learning_rate": 1.1166101694915254e-06, "loss": 0.1251, "step": 1353 },
{ "epoch": 9.03, "learning_rate": 1.1159322033898304e-06, "loss": 0.1441, "step": 1354 },
{ "epoch": 9.03, "learning_rate": 1.1152542372881355e-06, "loss": 0.0324, "step": 1355 },
{ "epoch": 9.04, "learning_rate": 1.1145762711864405e-06, "loss": 0.3081, "step": 1356 },
{ "epoch": 9.05, "learning_rate": 1.1138983050847458e-06, "loss": 0.0455, "step": 1357 },
{ "epoch": 9.05, "learning_rate": 1.1132203389830508e-06, "loss": 0.0934, "step": 1358 },
{ "epoch": 9.06, "learning_rate": 1.112542372881356e-06, "loss": 0.0865, "step": 1359 },
{ "epoch": 9.07, "learning_rate": 1.111864406779661e-06, "loss": 0.2466, "step": 1360 },
{ "epoch": 9.07, "learning_rate": 1.111186440677966e-06, "loss": 0.0347, "step": 1361 },
{ "epoch": 9.08, "learning_rate": 1.1105084745762713e-06, "loss": 0.3363, "step": 1362 },
{ "epoch": 9.09, "learning_rate": 1.1098305084745763e-06, "loss": 0.0317, "step": 1363 },
{ "epoch": 9.09, "learning_rate": 1.1091525423728814e-06, "loss": 0.0699, "step": 1364 },
{ "epoch": 9.1, "learning_rate": 1.1084745762711864e-06, "loss": 0.1004, "step": 1365 },
{ "epoch": 9.11, "learning_rate": 1.1077966101694915e-06, "loss": 0.1698, "step": 1366 },
{ "epoch": 9.11, "learning_rate": 1.1071186440677966e-06, "loss": 0.2511, "step": 1367 },
{ "epoch": 9.12, "learning_rate": 1.1064406779661018e-06, "loss": 0.0314, "step": 1368 },
{ "epoch": 9.13, "learning_rate": 1.1057627118644069e-06, "loss": 0.2047, "step": 1369 },
{ "epoch": 9.13, "learning_rate": 1.1050847457627117e-06, "loss": 0.0611, "step": 1370 },
{ "epoch": 9.14, "learning_rate": 1.1044067796610168e-06, "loss": 0.0405, "step": 1371 },
{ "epoch": 9.15, "learning_rate": 1.1037288135593218e-06, "loss": 0.0874, "step": 1372 },
{ "epoch": 9.15, "learning_rate": 1.103050847457627e-06, "loss": 0.0307, "step": 1373 },
{ "epoch": 9.16, "learning_rate": 1.1023728813559322e-06, "loss": 0.1837, "step": 1374 },
{ "epoch": 9.17, "learning_rate": 1.1016949152542372e-06, "loss": 0.0504, "step": 1375 },
{ "epoch": 9.17, "learning_rate": 1.1010169491525423e-06, "loss": 0.0599, "step": 1376 },
{ "epoch": 9.18, "learning_rate": 1.1003389830508473e-06, "loss": 0.2235, "step": 1377 },
{ "epoch": 9.19, "learning_rate": 1.0996610169491524e-06, "loss": 0.0907, "step": 1378 },
{ "epoch": 9.19, "learning_rate": 1.0989830508474576e-06, "loss": 0.0383, "step": 1379 },
{ "epoch": 9.2, "learning_rate": 1.0983050847457627e-06, "loss": 0.0758, "step": 1380 },
{ "epoch": 9.21, "learning_rate": 1.0976271186440677e-06, "loss": 0.1179, "step": 1381 },
{ "epoch": 9.21, "learning_rate": 1.0969491525423728e-06, "loss": 0.185, "step": 1382 },
{ "epoch": 9.22, "learning_rate": 1.0962711864406779e-06, "loss": 0.0848, "step": 1383 },
{ "epoch": 9.23, "learning_rate": 1.0955932203389831e-06, "loss": 0.0453, "step": 1384 },
{ "epoch": 9.23, "learning_rate": 1.0949152542372882e-06, "loss": 0.2184, "step": 1385 },
{ "epoch": 9.24, "learning_rate": 1.0942372881355932e-06, "loss": 0.0544, "step": 1386 },
{ "epoch": 9.25, "learning_rate": 1.0935593220338983e-06, "loss": 0.1797, "step": 1387 },
{ "epoch": 9.25, "learning_rate": 1.0928813559322033e-06, "loss": 0.1055, "step": 1388 },
{ "epoch": 9.26, "learning_rate": 1.0922033898305086e-06, "loss": 0.085, "step": 1389 },
{ "epoch": 9.27, "learning_rate": 1.0915254237288135e-06, "loss": 0.0641, "step": 1390 },
{ "epoch": 9.27, "learning_rate": 1.0908474576271185e-06, "loss": 0.0557, "step": 1391 },
{ "epoch": 9.28, "learning_rate": 1.0901694915254236e-06, "loss": 0.151, "step": 1392 },
{ "epoch": 9.29, "learning_rate": 1.0894915254237286e-06, "loss": 0.12, "step": 1393 },
{ "epoch": 9.29, "learning_rate": 1.0888135593220337e-06, "loss": 0.1, "step": 1394 },
{ "epoch": 9.3, "learning_rate": 1.088135593220339e-06, "loss": 0.1123, "step": 1395 },
{ "epoch": 9.31, "learning_rate": 1.087457627118644e-06, "loss": 0.0301, "step": 1396 },
{ "epoch": 9.31, "learning_rate": 1.086779661016949e-06, "loss": 0.0313, "step": 1397 },
{ "epoch": 9.32, "learning_rate": 1.0861016949152541e-06, "loss": 0.2272, "step": 1398 },
{ "epoch": 9.33, "learning_rate": 1.0854237288135592e-06, "loss": 0.2015, "step": 1399 },
{ "epoch": 9.33, "learning_rate": 1.0847457627118644e-06, "loss": 0.1647, "step": 1400 },
{ "epoch": 9.34, "learning_rate": 1.0840677966101695e-06, "loss": 0.1699, "step": 1401 },
{ "epoch": 9.35, "learning_rate": 1.0833898305084745e-06, "loss": 0.1895, "step": 1402 },
{ "epoch": 9.35, "learning_rate": 1.0827118644067796e-06, "loss": 0.0597, "step": 1403 },
{ "epoch": 9.36, "learning_rate": 1.0820338983050847e-06, "loss": 0.0582, "step": 1404 },
{ "epoch": 9.37, "learning_rate": 1.08135593220339e-06, "loss": 0.1196, "step": 1405 },
{ "epoch": 9.37, "learning_rate": 1.080677966101695e-06, "loss": 0.1069, "step": 1406 },
{ "epoch": 9.38, "learning_rate": 1.08e-06, "loss": 0.1025, "step": 1407 },
{ "epoch": 9.39, "learning_rate": 1.079322033898305e-06, "loss": 0.1306, "step": 1408 },
{ "epoch": 9.39, "learning_rate": 1.0786440677966101e-06, "loss": 0.0443, "step": 1409 },
{ "epoch": 9.4, "learning_rate": 1.0779661016949154e-06, "loss": 0.2391, "step": 1410 },
{ "epoch": 9.41, "learning_rate": 1.0772881355932203e-06, "loss": 0.0477, "step": 1411 },
{ "epoch": 9.41, "learning_rate": 1.0766101694915253e-06, "loss": 0.058, "step": 1412 },
{ "epoch": 9.42, "learning_rate": 1.0759322033898304e-06, "loss": 0.0741, "step": 1413 },
{ "epoch": 9.43, "learning_rate": 1.0752542372881354e-06, "loss": 0.1817, "step": 1414 },
{ "epoch": 9.43, "learning_rate": 1.0745762711864405e-06, "loss": 0.0343, "step": 1415 },
{ "epoch": 9.44, "learning_rate": 1.0738983050847457e-06, "loss": 0.1451, "step": 1416 },
{ "epoch": 9.45, "learning_rate": 1.0732203389830508e-06, "loss": 0.1805, "step": 1417 },
{ "epoch": 9.45, "learning_rate": 1.0725423728813559e-06, "loss": 0.079, "step": 1418 },
{ "epoch": 9.46, "learning_rate": 1.071864406779661e-06, "loss": 0.222, "step": 1419 },
{ "epoch": 9.47, "learning_rate": 1.071186440677966e-06, "loss": 0.0854, "step": 1420 },
{ "epoch": 9.47, "learning_rate": 1.0705084745762712e-06, "loss": 0.0296, "step": 1421 },
{ "epoch": 9.48, "learning_rate": 1.0698305084745763e-06, "loss": 0.1856, "step": 1422 },
{ "epoch": 9.49, "learning_rate": 1.0691525423728813e-06, "loss": 0.0282, "step": 1423 },
{ "epoch": 9.49, "learning_rate": 1.0684745762711864e-06, "loss": 0.2331, "step": 1424 },
{ "epoch": 9.5, "learning_rate": 1.0677966101694915e-06, "loss": 0.0268, "step": 1425 },
{ "epoch": 9.51, "learning_rate": 1.0671186440677965e-06, "loss": 0.0301, "step": 1426 },
{ "epoch": 9.51, "learning_rate": 1.0664406779661018e-06, "loss": 0.0385, "step": 1427 },
{ "epoch": 9.52, "learning_rate": 1.0657627118644068e-06, "loss": 0.2385, "step": 1428 },
{ "epoch": 9.53, "learning_rate": 1.0650847457627119e-06, "loss": 0.1927, "step": 1429 },
{ "epoch": 9.53, "learning_rate": 1.064406779661017e-06, "loss": 0.2379, "step": 1430 },
{ "epoch": 9.54, "learning_rate": 1.063728813559322e-06, "loss": 0.0843, "step": 1431 },
{ "epoch": 9.55, "learning_rate": 1.063050847457627e-06, "loss": 0.1116, "step": 1432 },
{ "epoch": 9.55, "learning_rate": 1.0623728813559321e-06, "loss": 0.2229, "step": 1433 },
{ "epoch": 9.56, "learning_rate": 1.0616949152542372e-06, "loss": 0.2296, "step": 1434 },
{ "epoch": 9.57, "learning_rate": 1.0610169491525422e-06, "loss": 0.1429, "step": 1435 },
{ "epoch": 9.57, "learning_rate": 1.0603389830508473e-06, "loss": 0.0599, "step": 1436 },
{ "epoch": 9.58, "learning_rate": 1.0596610169491525e-06, "loss": 0.0424, "step": 1437 },
{ "epoch": 9.59, "learning_rate": 1.0589830508474576e-06, "loss": 0.1371, "step": 1438 },
{ "epoch": 9.59, "learning_rate": 1.0583050847457627e-06, "loss": 0.1929, "step": 1439 },
{ "epoch": 9.6, "learning_rate": 1.0576271186440677e-06, "loss": 0.1231, "step": 1440 },
{ "epoch": 9.61, "learning_rate": 1.0569491525423728e-06, "loss": 0.1825, "step": 1441 },
{ "epoch": 9.61, "learning_rate": 1.0562711864406778e-06, "loss": 0.0358, "step": 1442 },
{ "epoch": 9.62, "learning_rate": 1.055593220338983e-06, "loss": 0.052, "step": 1443 },
{ "epoch": 9.63, "learning_rate": 1.0549152542372881e-06, "loss": 0.1638, "step": 1444 },
{ "epoch": 9.63, "learning_rate": 1.0542372881355932e-06, "loss": 0.0853, "step": 1445 },
{ "epoch": 9.64, "learning_rate": 1.0535593220338983e-06, "loss": 0.2089, "step": 1446 },
{ "epoch": 9.65, "learning_rate": 1.0528813559322033e-06, "loss": 0.0909, "step": 1447 },
{ "epoch": 9.65, "learning_rate": 1.0522033898305086e-06, "loss": 0.0385, "step": 1448 },
{ "epoch": 9.66, "learning_rate": 1.0515254237288136e-06, "loss": 0.0395, "step": 1449 },
{ "epoch": 9.67, "learning_rate": 1.0508474576271187e-06, "loss": 0.0251, "step": 1450 },
{ "epoch": 9.67, "learning_rate": 1.0501694915254237e-06, "loss": 0.0471, "step": 1451 },
{ "epoch": 9.68, "learning_rate": 1.0494915254237288e-06, "loss": 0.0989, "step": 1452 },
{ "epoch": 9.69, "learning_rate": 1.0488135593220339e-06, "loss": 0.0657, "step": 1453 },
{ "epoch": 9.69, "learning_rate": 1.048135593220339e-06, "loss": 0.0321, "step": 1454 },
{ "epoch": 9.7, "learning_rate": 1.047457627118644e-06, "loss": 0.0272, "step": 1455 },
{ "epoch": 9.71, "learning_rate": 1.046779661016949e-06, "loss": 0.131, "step": 1456 },
{ "epoch": 9.71, "learning_rate": 1.046101694915254e-06, "loss": 0.1074, "step": 1457 },
{ "epoch": 9.72, "learning_rate": 1.0454237288135591e-06, "loss": 0.0996, "step": 1458 },
{ "epoch": 9.73, "learning_rate": 1.0447457627118644e-06, "loss": 0.0533, "step": 1459 },
{ "epoch": 9.73, "learning_rate": 1.0440677966101695e-06, "loss": 0.036, "step": 1460 },
{ "epoch": 9.74, "learning_rate": 1.0433898305084745e-06, "loss": 0.1837, "step": 1461 },
{ "epoch": 9.75, "learning_rate": 1.0427118644067796e-06, "loss": 0.0418, "step": 1462 },
{ "epoch": 9.75, "learning_rate": 1.0420338983050846e-06, "loss": 0.0848, "step": 1463 },
{ "epoch": 9.76, "learning_rate": 1.0413559322033899e-06, "loss": 0.1811, "step": 1464 },
{ "epoch": 9.77, "learning_rate": 1.040677966101695e-06, "loss": 0.1128, "step": 1465 },
{ "epoch": 9.77, "learning_rate": 1.04e-06, "loss": 0.1276, "step": 1466 },
{ "epoch": 9.78, "learning_rate": 1.039322033898305e-06, "loss": 0.0564, "step": 1467 },
{ "epoch": 9.79, "learning_rate": 1.0386440677966101e-06, "loss": 0.0403, "step": 1468 },
{ "epoch": 9.79, "learning_rate": 1.0379661016949154e-06, "loss": 0.1912, "step": 1469 },
{ "epoch": 9.8, "learning_rate": 1.0372881355932204e-06, "loss": 0.0548, "step": 1470 },
{ "epoch": 9.81, "learning_rate": 1.0366101694915255e-06, "loss": 0.043, "step": 1471 },
{ "epoch": 9.81, "learning_rate": 1.0359322033898305e-06, "loss": 0.0497, "step": 1472 },
{ "epoch": 9.82, "learning_rate": 1.0352542372881356e-06, "loss": 0.1063, "step": 1473 },
{ "epoch": 9.83, "learning_rate": 1.0345762711864404e-06, "loss": 0.0593, "step": 1474 },
{ "epoch": 9.83, "learning_rate": 1.0338983050847457e-06, "loss": 0.1648, "step": 1475 },
{ "epoch": 9.84, "learning_rate": 1.0332203389830508e-06, "loss": 0.033, "step": 1476 },
{ "epoch": 9.85, "learning_rate": 1.0325423728813558e-06, "loss": 0.0263, "step": 1477 },
{ "epoch": 9.85, "learning_rate": 1.0318644067796609e-06, "loss": 0.0807, "step": 1478 },
{ "epoch": 9.86, "learning_rate": 1.031186440677966e-06, "loss": 0.0287, "step": 1479 },
{ "epoch": 9.87, "learning_rate": 1.0305084745762712e-06, "loss": 0.0417, "step": 1480 },
{ "epoch": 9.87, "learning_rate": 1.0298305084745762e-06, "loss": 0.1855, "step": 1481 },
{ "epoch": 9.88, "learning_rate": 1.0291525423728813e-06, "loss": 0.0682, "step": 1482 },
{ "epoch": 9.89, "learning_rate": 1.0284745762711864e-06, "loss": 0.1175, "step": 1483 },
{ "epoch": 9.89, "learning_rate": 1.0277966101694914e-06, "loss": 0.1745, "step": 1484 },
{ "epoch": 9.9, "learning_rate": 1.0271186440677967e-06, "loss": 0.0323, "step": 1485 },
{ "epoch": 9.91, "learning_rate": 1.0264406779661017e-06, "loss": 0.0698, "step": 1486 },
{ "epoch": 9.91, "learning_rate": 1.0257627118644068e-06, "loss": 0.118, "step": 1487 },
{ "epoch": 9.92, "learning_rate": 1.0250847457627118e-06, "loss": 0.0233, "step": 1488 },
{ "epoch": 9.93, "learning_rate": 1.024406779661017e-06, "loss": 0.0805, "step": 1489 },
{ "epoch": 9.93, "learning_rate": 1.023728813559322e-06, "loss": 0.1352, "step": 1490 },
{ "epoch": 9.94, "learning_rate": 1.0230508474576272e-06, "loss": 0.0255, "step": 1491 },
{ "epoch": 9.95, "learning_rate": 1.0223728813559323e-06, "loss": 0.0864, "step": 1492 },
{ "epoch": 9.95, "learning_rate": 1.0216949152542373e-06, "loss": 0.0446, "step": 1493 },
{ "epoch": 9.96, "learning_rate": 1.0210169491525422e-06, "loss": 0.1036, "step": 1494 },
{ "epoch": 9.97, "learning_rate": 1.0203389830508472e-06, "loss": 0.0254, "step": 1495 },
{ "epoch": 9.97, "learning_rate": 1.0196610169491525e-06, "loss": 0.2954, "step": 1496 },
{ "epoch": 9.98, "learning_rate": 1.0189830508474576e-06, "loss": 0.2212, "step": 1497 },
{ "epoch": 9.99, "learning_rate": 1.0183050847457626e-06, "loss": 0.0817, "step": 1498 },
{ "epoch": 9.99, "learning_rate": 1.0176271186440677e-06, "loss": 0.0306, "step": 1499 },
{ "epoch": 10.0, "learning_rate": 1.0169491525423727e-06, "loss": 0.1143, "step": 1500 },
{ "epoch": 10.0, "eval_accuracy": 0.945, "eval_loss": 0.15177254378795624, "eval_runtime": 2.1691, "eval_samples_per_second": 553.236, "eval_steps_per_second": 4.61, "step": 1500 },
|
{ |
|
"epoch": 10.01, |
|
"learning_rate": 1.016271186440678e-06, |
|
"loss": 0.0557, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 10.01, |
|
"learning_rate": 1.015593220338983e-06, |
|
"loss": 0.1567, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 10.02, |
|
"learning_rate": 1.014915254237288e-06, |
|
"loss": 0.0305, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"learning_rate": 1.0142372881355932e-06, |
|
"loss": 0.1316, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"learning_rate": 1.0135593220338982e-06, |
|
"loss": 0.1483, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 10.04, |
|
"learning_rate": 1.0128813559322033e-06, |
|
"loss": 0.1625, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"learning_rate": 1.0122033898305085e-06, |
|
"loss": 0.0724, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"learning_rate": 1.0115254237288136e-06, |
|
"loss": 0.0909, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 10.06, |
|
"learning_rate": 1.0108474576271186e-06, |
|
"loss": 0.035, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 10.07, |
|
"learning_rate": 1.0101694915254237e-06, |
|
"loss": 0.0409, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 10.07, |
|
"learning_rate": 1.0094915254237288e-06, |
|
"loss": 0.0768, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 10.08, |
|
"learning_rate": 1.008813559322034e-06, |
|
"loss": 0.0784, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 10.09, |
|
"learning_rate": 1.008135593220339e-06, |
|
"loss": 0.2329, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 10.09, |
|
"learning_rate": 1.0074576271186441e-06, |
|
"loss": 0.0417, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"learning_rate": 1.006779661016949e-06, |
|
"loss": 0.1155, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"learning_rate": 1.006101694915254e-06, |
|
"loss": 0.0537, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"learning_rate": 1.005423728813559e-06, |
|
"loss": 0.0658, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 10.12, |
|
"learning_rate": 1.0047457627118644e-06, |
|
"loss": 0.0241, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 1.0040677966101694e-06, |
|
"loss": 0.063, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 1.0033898305084745e-06, |
|
"loss": 0.0334, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"learning_rate": 1.0027118644067795e-06, |
|
"loss": 0.0531, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"learning_rate": 1.0020338983050846e-06, |
|
"loss": 0.075, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"learning_rate": 1.0013559322033898e-06, |
|
"loss": 0.1308, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 10.16, |
|
"learning_rate": 1.000677966101695e-06, |
|
"loss": 0.263, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"learning_rate": 1e-06, |
|
"loss": 0.0646, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"learning_rate": 9.99322033898305e-07, |
|
"loss": 0.1697, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 10.18, |
|
"learning_rate": 9.9864406779661e-07, |
|
"loss": 0.1115, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"learning_rate": 9.979661016949153e-07, |
|
"loss": 0.0688, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"learning_rate": 9.972881355932204e-07, |
|
"loss": 0.0401, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 9.966101694915254e-07, |
|
"loss": 0.0223, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 10.21, |
|
"learning_rate": 9.959322033898305e-07, |
|
"loss": 0.0776, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 10.21, |
|
"learning_rate": 9.952542372881356e-07, |
|
"loss": 0.0407, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 10.22, |
|
"learning_rate": 9.945762711864406e-07, |
|
"loss": 0.0726, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"learning_rate": 9.938983050847457e-07, |
|
"loss": 0.055, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"learning_rate": 9.932203389830507e-07, |
|
"loss": 0.0968, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 10.24, |
|
"learning_rate": 9.92542372881356e-07, |
|
"loss": 0.0592, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 10.25, |
|
"learning_rate": 9.91864406779661e-07, |
|
"loss": 0.1778, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 10.25, |
|
"learning_rate": 9.91186440677966e-07, |
|
"loss": 0.1782, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 10.26, |
|
"learning_rate": 9.905084745762712e-07, |
|
"loss": 0.0839, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"learning_rate": 9.898305084745762e-07, |
|
"loss": 0.0448, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"learning_rate": 9.891525423728813e-07, |
|
"loss": 0.2248, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 10.28, |
|
"learning_rate": 9.884745762711863e-07, |
|
"loss": 0.0401, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"learning_rate": 9.877966101694914e-07, |
|
"loss": 0.0886, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"learning_rate": 9.871186440677966e-07, |
|
"loss": 0.0326, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 10.3, |
|
"learning_rate": 9.864406779661017e-07, |
|
"loss": 0.0931, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"learning_rate": 9.857627118644068e-07, |
|
"loss": 0.069, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"learning_rate": 9.850847457627118e-07, |
|
"loss": 0.1535, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 10.32, |
|
"learning_rate": 9.844067796610169e-07, |
|
"loss": 0.0687, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"learning_rate": 9.83728813559322e-07, |
|
"loss": 0.0883, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"learning_rate": 9.830508474576272e-07, |
|
"loss": 0.0346, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 10.34, |
|
"learning_rate": 9.82372881355932e-07, |
|
"loss": 0.0401, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 10.35, |
|
"learning_rate": 9.816949152542373e-07, |
|
"loss": 0.0254, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 10.35, |
|
"learning_rate": 9.810169491525424e-07, |
|
"loss": 0.0543, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"learning_rate": 9.803389830508474e-07, |
|
"loss": 0.0263, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"learning_rate": 9.796610169491525e-07, |
|
"loss": 0.0876, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"learning_rate": 9.789830508474575e-07, |
|
"loss": 0.0264, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 10.38, |
|
"learning_rate": 9.783050847457626e-07, |
|
"loss": 0.0293, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 10.39, |
|
"learning_rate": 9.776271186440678e-07, |
|
"loss": 0.0385, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 10.39, |
|
"learning_rate": 9.76949152542373e-07, |
|
"loss": 0.1289, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"learning_rate": 9.76271186440678e-07, |
|
"loss": 0.0517, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"learning_rate": 9.75593220338983e-07, |
|
"loss": 0.143, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"learning_rate": 9.74915254237288e-07, |
|
"loss": 0.0375, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 10.42, |
|
"learning_rate": 9.742372881355931e-07, |
|
"loss": 0.0317, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 9.735593220338982e-07, |
|
"loss": 0.1121, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 9.728813559322032e-07, |
|
"loss": 0.0701, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 10.44, |
|
"learning_rate": 9.722033898305085e-07, |
|
"loss": 0.2551, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 10.45, |
|
"learning_rate": 9.715254237288135e-07, |
|
"loss": 0.0532, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 10.45, |
|
"learning_rate": 9.708474576271186e-07, |
|
"loss": 0.1007, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 10.46, |
|
"learning_rate": 9.701694915254237e-07, |
|
"loss": 0.0974, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 10.47, |
|
"learning_rate": 9.694915254237287e-07, |
|
"loss": 0.0702, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 10.47, |
|
"learning_rate": 9.68813559322034e-07, |
|
"loss": 0.0662, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"learning_rate": 9.681355932203388e-07, |
|
"loss": 0.1244, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 10.49, |
|
"learning_rate": 9.674576271186439e-07, |
|
"loss": 0.0324, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 10.49, |
|
"learning_rate": 9.667796610169491e-07, |
|
"loss": 0.0394, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 9.661016949152542e-07, |
|
"loss": 0.0366, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 10.51, |
|
"learning_rate": 9.654237288135593e-07, |
|
"loss": 0.0771, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 10.51, |
|
"learning_rate": 9.647457627118643e-07, |
|
"loss": 0.2417, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"learning_rate": 9.640677966101694e-07, |
|
"loss": 0.0307, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 10.53, |
|
"learning_rate": 9.633898305084746e-07, |
|
"loss": 0.0383, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 10.53, |
|
"learning_rate": 9.627118644067797e-07, |
|
"loss": 0.0589, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 10.54, |
|
"learning_rate": 9.620338983050847e-07, |
|
"loss": 0.2179, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 9.613559322033898e-07, |
|
"loss": 0.0272, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 9.606779661016949e-07, |
|
"loss": 0.0946, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 10.56, |
|
"learning_rate": 9.6e-07, |
|
"loss": 0.1388, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"learning_rate": 9.59322033898305e-07, |
|
"loss": 0.0241, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"learning_rate": 9.5864406779661e-07, |
|
"loss": 0.1374, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 10.58, |
|
"learning_rate": 9.579661016949153e-07, |
|
"loss": 0.1069, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 10.59, |
|
"learning_rate": 9.572881355932203e-07, |
|
"loss": 0.0575, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 10.59, |
|
"learning_rate": 9.566101694915254e-07, |
|
"loss": 0.0337, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 10.6, |
|
"learning_rate": 9.559322033898305e-07, |
|
"loss": 0.0286, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"learning_rate": 9.552542372881355e-07, |
|
"loss": 0.0446, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"learning_rate": 9.545762711864408e-07, |
|
"loss": 0.0488, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 10.62, |
|
"learning_rate": 9.538983050847456e-07, |
|
"loss": 0.0918, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"learning_rate": 9.532203389830508e-07, |
|
"loss": 0.1277, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"learning_rate": 9.525423728813558e-07, |
|
"loss": 0.0363, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"learning_rate": 9.51864406779661e-07, |
|
"loss": 0.2863, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 10.65, |
|
"learning_rate": 9.511864406779661e-07, |
|
"loss": 0.0457, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 10.65, |
|
"learning_rate": 9.505084745762711e-07, |
|
"loss": 0.0841, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"learning_rate": 9.498305084745763e-07, |
|
"loss": 0.0411, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 10.67, |
|
"learning_rate": 9.491525423728813e-07, |
|
"loss": 0.0781, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 10.67, |
|
"learning_rate": 9.484745762711864e-07, |
|
"loss": 0.29, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 10.68, |
|
"learning_rate": 9.477966101694915e-07, |
|
"loss": 0.1361, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"learning_rate": 9.471186440677965e-07, |
|
"loss": 0.0296, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"learning_rate": 9.464406779661017e-07, |
|
"loss": 0.1135, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 10.7, |
|
"learning_rate": 9.457627118644067e-07, |
|
"loss": 0.3699, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"learning_rate": 9.450847457627118e-07, |
|
"loss": 0.0626, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"learning_rate": 9.444067796610169e-07, |
|
"loss": 0.1147, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 10.72, |
|
"learning_rate": 9.43728813559322e-07, |
|
"loss": 0.0588, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 9.43050847457627e-07, |
|
"loss": 0.2042, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 9.423728813559322e-07, |
|
"loss": 0.0297, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 10.74, |
|
"learning_rate": 9.416949152542373e-07, |
|
"loss": 0.0562, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 10.75, |
|
"learning_rate": 9.410169491525424e-07, |
|
"loss": 0.0372, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 10.75, |
|
"learning_rate": 9.403389830508475e-07, |
|
"loss": 0.0276, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"learning_rate": 9.396610169491524e-07, |
|
"loss": 0.1468, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 10.77, |
|
"learning_rate": 9.389830508474576e-07, |
|
"loss": 0.1258, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 10.77, |
|
"learning_rate": 9.383050847457626e-07, |
|
"loss": 0.0655, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"learning_rate": 9.376271186440677e-07, |
|
"loss": 0.1057, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 10.79, |
|
"learning_rate": 9.369491525423729e-07, |
|
"loss": 0.1058, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 10.79, |
|
"learning_rate": 9.362711864406779e-07, |
|
"loss": 0.1868, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"learning_rate": 9.355932203389831e-07, |
|
"loss": 0.0221, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 10.81, |
|
"learning_rate": 9.349152542372881e-07, |
|
"loss": 0.1945, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 10.81, |
|
"learning_rate": 9.342372881355932e-07, |
|
"loss": 0.0807, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 10.82, |
|
"learning_rate": 9.335593220338983e-07, |
|
"loss": 0.043, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 9.328813559322033e-07, |
|
"loss": 0.0308, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 9.322033898305083e-07, |
|
"loss": 0.0649, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 10.84, |
|
"learning_rate": 9.315254237288135e-07, |
|
"loss": 0.1007, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 9.308474576271186e-07, |
|
"loss": 0.0588, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 9.301694915254237e-07, |
|
"loss": 0.0193, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 10.86, |
|
"learning_rate": 9.294915254237288e-07, |
|
"loss": 0.2193, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 10.87, |
|
"learning_rate": 9.288135593220338e-07, |
|
"loss": 0.1133, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 10.87, |
|
"learning_rate": 9.28135593220339e-07, |
|
"loss": 0.0732, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 10.88, |
|
"learning_rate": 9.274576271186441e-07, |
|
"loss": 0.0529, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 9.267796610169491e-07, |
|
"loss": 0.1215, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 9.261016949152543e-07, |
|
"loss": 0.0263, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"learning_rate": 9.254237288135592e-07, |
|
"loss": 0.2879, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 10.91, |
|
"learning_rate": 9.247457627118644e-07, |
|
"loss": 0.05, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 10.91, |
|
"learning_rate": 9.240677966101694e-07, |
|
"loss": 0.0468, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"learning_rate": 9.233898305084745e-07, |
|
"loss": 0.2238, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 10.93, |
|
"learning_rate": 9.227118644067797e-07, |
|
"loss": 0.0409, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 10.93, |
|
"learning_rate": 9.220338983050847e-07, |
|
"loss": 0.0212, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 10.94, |
|
"learning_rate": 9.213559322033898e-07, |
|
"loss": 0.0686, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"learning_rate": 9.206779661016949e-07, |
|
"loss": 0.0202, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"learning_rate": 9.2e-07, |
|
"loss": 0.0578, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 10.96, |
|
"learning_rate": 9.193220338983051e-07, |
|
"loss": 0.1923, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 10.97, |
|
"learning_rate": 9.186440677966101e-07, |
|
"loss": 0.0574, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 10.97, |
|
"learning_rate": 9.179661016949151e-07, |
|
"loss": 0.2171, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 10.98, |
|
"learning_rate": 9.172881355932203e-07, |
|
"loss": 0.0264, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"learning_rate": 9.166101694915254e-07, |
|
"loss": 0.0226, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"learning_rate": 9.159322033898304e-07, |
|
"loss": 0.1663, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"learning_rate": 9.152542372881356e-07, |
|
"loss": 0.1493, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_accuracy": 0.9475, |
|
"eval_loss": 0.14478321373462677, |
|
"eval_runtime": 2.1629, |
|
"eval_samples_per_second": 554.809, |
|
"eval_steps_per_second": 4.623, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 9.145762711864406e-07, |
|
"loss": 0.081, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 9.138983050847458e-07, |
|
"loss": 0.0363, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 11.02, |
|
"learning_rate": 9.132203389830509e-07, |
|
"loss": 0.225, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"learning_rate": 9.125423728813559e-07, |
|
"loss": 0.0189, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"learning_rate": 9.11864406779661e-07, |
|
"loss": 0.112, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 11.04, |
|
"learning_rate": 9.11186440677966e-07, |
|
"loss": 0.0565, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 11.05, |
|
"learning_rate": 9.105084745762711e-07, |
|
"loss": 0.0993, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 11.05, |
|
"learning_rate": 9.098305084745762e-07, |
|
"loss": 0.0203, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 11.06, |
|
"learning_rate": 9.091525423728813e-07, |
|
"loss": 0.1224, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"learning_rate": 9.084745762711864e-07, |
|
"loss": 0.0782, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"learning_rate": 9.077966101694915e-07, |
|
"loss": 0.0613, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 11.08, |
|
"learning_rate": 9.071186440677966e-07, |
|
"loss": 0.1541, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 11.09, |
|
"learning_rate": 9.064406779661017e-07, |
|
"loss": 0.0877, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 11.09, |
|
"learning_rate": 9.057627118644068e-07, |
|
"loss": 0.0268, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 11.1, |
|
"learning_rate": 9.050847457627118e-07, |
|
"loss": 0.0517, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"learning_rate": 9.044067796610169e-07, |
|
"loss": 0.029, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"learning_rate": 9.037288135593219e-07, |
|
"loss": 0.0643, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 11.12, |
|
"learning_rate": 9.030508474576271e-07, |
|
"loss": 0.1341, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 9.023728813559322e-07, |
|
"loss": 0.0244, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 9.016949152542372e-07, |
|
"loss": 0.0228, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 11.14, |
|
"learning_rate": 9.010169491525424e-07, |
|
"loss": 0.2094, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 11.15, |
|
"learning_rate": 9.003389830508474e-07, |
|
"loss": 0.0346, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 11.15, |
|
"learning_rate": 8.996610169491525e-07, |
|
"loss": 0.0725, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"learning_rate": 8.989830508474576e-07, |
|
"loss": 0.1164, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 11.17, |
|
"learning_rate": 8.983050847457627e-07, |
|
"loss": 0.0226, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 11.17, |
|
"learning_rate": 8.976271186440677e-07, |
|
"loss": 0.2077, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 11.18, |
|
"learning_rate": 8.969491525423728e-07, |
|
"loss": 0.0591, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 11.19, |
|
"learning_rate": 8.962711864406779e-07, |
|
"loss": 0.032, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 11.19, |
|
"learning_rate": 8.95593220338983e-07, |
|
"loss": 0.0822, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"learning_rate": 8.949152542372881e-07, |
|
"loss": 0.0863, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 8.942372881355931e-07, |
|
"loss": 0.1718, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 8.935593220338983e-07, |
|
"loss": 0.0819, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 11.22, |
|
"learning_rate": 8.928813559322034e-07, |
|
"loss": 0.1918, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 11.23, |
|
"learning_rate": 8.922033898305085e-07, |
|
"loss": 0.0255, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 11.23, |
|
"learning_rate": 8.915254237288136e-07, |
|
"loss": 0.2546, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 11.24, |
|
"learning_rate": 8.908474576271186e-07, |
|
"loss": 0.0429, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 11.25, |
|
"learning_rate": 8.901694915254237e-07, |
|
"loss": 0.0385, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 11.25, |
|
"learning_rate": 8.894915254237287e-07, |
|
"loss": 0.0557, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 11.26, |
|
"learning_rate": 8.888135593220338e-07, |
|
"loss": 0.0666, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 11.27, |
|
"learning_rate": 8.88135593220339e-07, |
|
"loss": 0.0375, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 11.27, |
|
"learning_rate": 8.87457627118644e-07, |
|
"loss": 0.0403, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 11.28, |
|
"learning_rate": 8.867796610169492e-07, |
|
"loss": 0.1401, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 11.29, |
|
"learning_rate": 8.861016949152542e-07, |
|
"loss": 0.1327, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 11.29, |
|
"learning_rate": 8.854237288135593e-07, |
|
"loss": 0.0674, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"learning_rate": 8.847457627118644e-07, |
|
"loss": 0.1114, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 11.31, |
|
"learning_rate": 8.840677966101695e-07, |
|
"loss": 0.0906, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 11.31, |
|
"learning_rate": 8.833898305084745e-07, |
|
"loss": 0.1371, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 11.32, |
|
"learning_rate": 8.827118644067796e-07, |
|
"loss": 0.1079, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"learning_rate": 8.820338983050847e-07, |
|
"loss": 0.0557, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"learning_rate": 8.813559322033897e-07, |
|
"loss": 0.1407, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 11.34, |
|
"learning_rate": 8.806779661016949e-07, |
|
"loss": 0.0174, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 11.35, |
|
"learning_rate": 8.799999999999999e-07, |
|
"loss": 0.0196, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 11.35, |
|
"learning_rate": 8.793220338983051e-07, |
|
"loss": 0.2311, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 11.36, |
|
"learning_rate": 8.786440677966102e-07, |
|
"loss": 0.0892, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"learning_rate": 8.779661016949152e-07, |
|
"loss": 0.0751, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"learning_rate": 8.772881355932204e-07, |
|
"loss": 0.2013, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 11.38, |
|
"learning_rate": 8.766101694915253e-07, |
|
"loss": 0.1109, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"learning_rate": 8.759322033898304e-07, |
|
"loss": 0.1075, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"learning_rate": 8.752542372881355e-07, |
|
"loss": 0.0662, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 11.4, |
|
"learning_rate": 8.745762711864406e-07, |
|
"loss": 0.0216, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"learning_rate": 8.738983050847458e-07, |
|
"loss": 0.0504, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"learning_rate": 8.732203389830508e-07, |
|
"loss": 0.0234, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 11.42, |
|
"learning_rate": 8.725423728813559e-07, |
|
"loss": 0.0263, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 11.43, |
|
"learning_rate": 8.71864406779661e-07, |
|
"loss": 0.0962, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 11.43, |
|
"learning_rate": 8.711864406779661e-07, |
|
"loss": 0.1633, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 11.44, |
|
"learning_rate": 8.705084745762711e-07, |
|
"loss": 0.04, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 11.45, |
|
"learning_rate": 8.698305084745763e-07, |
|
"loss": 0.0321, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 11.45, |
|
"learning_rate": 8.691525423728812e-07, |
|
"loss": 0.0356, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 11.46, |
|
"learning_rate": 8.684745762711864e-07, |
|
"loss": 0.1446, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 11.47, |
|
"learning_rate": 8.677966101694915e-07, |
|
"loss": 0.0257, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 11.47, |
|
"learning_rate": 8.671186440677965e-07, |
|
"loss": 0.159, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 11.48, |
|
"learning_rate": 8.664406779661017e-07, |
|
"loss": 0.0947, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"learning_rate": 8.657627118644067e-07, |
|
"loss": 0.0194, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"learning_rate": 8.650847457627118e-07, |
|
"loss": 0.0315, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 11.5, |
|
"learning_rate": 8.64406779661017e-07, |
|
"loss": 0.1295, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 11.51, |
|
"learning_rate": 8.63728813559322e-07, |
|
"loss": 0.0317, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 11.51, |
|
"learning_rate": 8.630508474576272e-07, |
|
"loss": 0.0466, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 11.52, |
|
"learning_rate": 8.623728813559321e-07, |
|
"loss": 0.2306, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"learning_rate": 8.616949152542372e-07, |
|
"loss": 0.2285, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"learning_rate": 8.610169491525423e-07, |
|
"loss": 0.0204, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 11.54, |
|
"learning_rate": 8.603389830508474e-07, |
|
"loss": 0.0379, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 11.55, |
|
"learning_rate": 8.596610169491524e-07, |
|
"loss": 0.025, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 11.55, |
|
"learning_rate": 8.589830508474576e-07, |
|
"loss": 0.1142, |
|
"step": 1733 |
|
}, |
|
{ |
|
"epoch": 11.56, |
|
"learning_rate": 8.583050847457627e-07, |
|
"loss": 0.1409, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 11.57, |
|
"learning_rate": 8.576271186440678e-07, |
|
"loss": 0.0855, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 11.57, |
|
"learning_rate": 8.569491525423729e-07, |
|
"loss": 0.0196, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"learning_rate": 8.562711864406779e-07, |
|
"loss": 0.0458, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 11.59, |
|
"learning_rate": 8.55593220338983e-07, |
|
"loss": 0.1197, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 11.59, |
|
"learning_rate": 8.54915254237288e-07, |
|
"loss": 0.0594, |
|
"step": 1739 |
|
}, |
|
{ |
|
"epoch": 11.6, |
|
"learning_rate": 8.542372881355931e-07, |
|
"loss": 0.0677, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"learning_rate": 8.535593220338983e-07, |
|
"loss": 0.0647, |
|
"step": 1741 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"learning_rate": 8.528813559322033e-07, |
|
"loss": 0.0388, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 11.62, |
|
"learning_rate": 8.522033898305085e-07, |
|
"loss": 0.0583, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 11.63, |
|
"learning_rate": 8.515254237288135e-07, |
|
"loss": 0.1218, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 11.63, |
|
"learning_rate": 8.508474576271186e-07, |
|
"loss": 0.0576, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 11.64, |
|
"learning_rate": 8.501694915254238e-07, |
|
"loss": 0.0614, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 11.65, |
|
"learning_rate": 8.494915254237288e-07, |
|
"loss": 0.0291, |
|
"step": 1747 |
|
}, |
|
{ |
|
"epoch": 11.65, |
|
"learning_rate": 8.488135593220339e-07, |
|
"loss": 0.0214, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 11.66, |
|
"learning_rate": 8.481355932203389e-07, |
|
"loss": 0.0295, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"learning_rate": 8.47457627118644e-07, |
|
"loss": 0.04, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"learning_rate": 8.467796610169491e-07, |
|
"loss": 0.2888, |
|
"step": 1751 |
|
}, |
|
{ |
|
"epoch": 11.68, |
|
"learning_rate": 8.461016949152542e-07, |
|
"loss": 0.091, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 11.69, |
|
"learning_rate": 8.454237288135592e-07, |
|
"loss": 0.1011, |
|
"step": 1753 |
|
}, |
|
{ |
|
"epoch": 11.69, |
|
"learning_rate": 8.447457627118644e-07, |
|
"loss": 0.0537, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"learning_rate": 8.440677966101695e-07, |
|
"loss": 0.0212, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"learning_rate": 8.433898305084745e-07, |
|
"loss": 0.0602, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"learning_rate": 8.427118644067797e-07, |
|
"loss": 0.0292, |
|
"step": 1757 |
|
}, |
|
{ |
|
"epoch": 11.72, |
|
"learning_rate": 8.420338983050847e-07, |
|
"loss": 0.1767, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"learning_rate": 8.413559322033898e-07, |
|
"loss": 0.1482, |
|
"step": 1759 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"learning_rate": 8.406779661016948e-07, |
|
"loss": 0.1359, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"learning_rate": 8.399999999999999e-07, |
|
"loss": 0.165, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 11.75, |
|
"learning_rate": 8.393220338983051e-07, |
|
"loss": 0.0285, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 11.75, |
|
"learning_rate": 8.386440677966101e-07, |
|
"loss": 0.1368, |
|
"step": 1763 |
|
}, |
|
{ |
|
"epoch": 11.76, |
|
"learning_rate": 8.379661016949152e-07, |
|
"loss": 0.0363, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"learning_rate": 8.372881355932203e-07, |
|
"loss": 0.0407, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"learning_rate": 8.366101694915254e-07, |
|
"loss": 0.1627, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 11.78, |
|
"learning_rate": 8.359322033898305e-07, |
|
"loss": 0.1292, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"learning_rate": 8.352542372881356e-07, |
|
"loss": 0.0687, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"learning_rate": 8.345762711864407e-07, |
|
"loss": 0.0187, |
|
"step": 1769 |
|
}, |
|
{ |
|
"epoch": 11.8, |
|
"learning_rate": 8.338983050847457e-07, |
|
"loss": 0.0235, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 11.81, |
|
"learning_rate": 8.332203389830508e-07, |
|
"loss": 0.0354, |
|
"step": 1771 |
|
}, |
|
{ |
|
"epoch": 11.81, |
|
"learning_rate": 8.325423728813558e-07, |
|
"loss": 0.0961, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 11.82, |
|
"learning_rate": 8.31864406779661e-07, |
|
"loss": 0.0591, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"learning_rate": 8.31186440677966e-07, |
|
"loss": 0.0528, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"learning_rate": 8.305084745762712e-07, |
|
"loss": 0.0346, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 11.84, |
|
"learning_rate": 8.298305084745763e-07, |
|
"loss": 0.053, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 11.85, |
|
"learning_rate": 8.291525423728813e-07, |
|
"loss": 0.0571, |
|
"step": 1777 |
|
}, |
|
{ |
|
"epoch": 11.85, |
|
"learning_rate": 8.284745762711865e-07, |
|
"loss": 0.3169, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 11.86, |
|
"learning_rate": 8.277966101694915e-07, |
|
"loss": 0.2546, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 11.87, |
|
"learning_rate": 8.271186440677965e-07, |
|
"loss": 0.0486, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 11.87, |
|
"learning_rate": 8.264406779661016e-07, |
|
"loss": 0.1539, |
|
"step": 1781 |
|
}, |
|
{ |
|
"epoch": 11.88, |
|
"learning_rate": 8.257627118644067e-07, |
|
"loss": 0.0455, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"learning_rate": 8.250847457627119e-07, |
|
"loss": 0.0232, |
|
"step": 1783 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"learning_rate": 8.244067796610169e-07, |
|
"loss": 0.0457, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 11.9, |
|
"learning_rate": 8.23728813559322e-07, |
|
"loss": 0.0172, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"learning_rate": 8.230508474576271e-07, |
|
"loss": 0.1517, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"learning_rate": 8.223728813559322e-07, |
|
"loss": 0.0472, |
|
"step": 1787 |
|
}, |
|
{ |
|
"epoch": 11.92, |
|
"learning_rate": 8.216949152542372e-07, |
|
"loss": 0.0212, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 11.93, |
|
"learning_rate": 8.210169491525424e-07, |
|
"loss": 0.0432, |
|
"step": 1789 |
|
}, |
|
{ |
|
"epoch": 11.93, |
|
"learning_rate": 8.203389830508474e-07, |
|
"loss": 0.1246, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 11.94, |
|
"learning_rate": 8.196610169491525e-07, |
|
"loss": 0.0208, |
|
"step": 1791 |
|
}, |
|
{ |
|
"epoch": 11.95, |
|
"learning_rate": 8.189830508474576e-07, |
|
"loss": 0.0466, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 11.95, |
|
"learning_rate": 8.183050847457626e-07, |
|
"loss": 0.0909, |
|
"step": 1793 |
|
}, |
|
{ |
|
"epoch": 11.96, |
|
"learning_rate": 8.176271186440678e-07, |
|
"loss": 0.0204, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 11.97, |
|
"learning_rate": 8.169491525423728e-07, |
|
"loss": 0.12, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 11.97, |
|
"learning_rate": 8.162711864406779e-07, |
|
"loss": 0.0938, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 11.98, |
|
"learning_rate": 8.155932203389831e-07, |
|
"loss": 0.0808, |
|
"step": 1797 |
|
}, |
|
{ |
|
"epoch": 11.99, |
|
"learning_rate": 8.149152542372881e-07, |
|
"loss": 0.02, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 11.99, |
|
"learning_rate": 8.142372881355933e-07, |
|
"loss": 0.0523, |
|
"step": 1799 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"learning_rate": 8.135593220338983e-07, |
|
"loss": 0.094, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_accuracy": 0.9525, |
|
"eval_loss": 0.13722257316112518, |
|
"eval_runtime": 2.1728, |
|
"eval_samples_per_second": 552.291, |
|
"eval_steps_per_second": 4.602, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 12.01, |
|
"learning_rate": 8.128813559322033e-07, |
|
"loss": 0.0227, |
|
"step": 1801 |
|
}, |
|
{ |
|
"epoch": 12.01, |
|
"learning_rate": 8.122033898305084e-07, |
|
"loss": 0.0743, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 12.02, |
|
"learning_rate": 8.115254237288135e-07, |
|
"loss": 0.0921, |
|
"step": 1803 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 8.108474576271185e-07, |
|
"loss": 0.0433, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 8.101694915254237e-07, |
|
"loss": 0.0233, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"learning_rate": 8.094915254237288e-07, |
|
"loss": 0.2267, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 12.05, |
|
"learning_rate": 8.088135593220339e-07, |
|
"loss": 0.0152, |
|
"step": 1807 |
|
}, |
|
{ |
|
"epoch": 12.05, |
|
"learning_rate": 8.08135593220339e-07, |
|
"loss": 0.1364, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 12.06, |
|
"learning_rate": 8.07457627118644e-07, |
|
"loss": 0.0307, |
|
"step": 1809 |
|
}, |
|
{ |
|
"epoch": 12.07, |
|
"learning_rate": 8.067796610169492e-07, |
|
"loss": 0.0941, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 12.07, |
|
"learning_rate": 8.061016949152541e-07, |
|
"loss": 0.0434, |
|
"step": 1811 |
|
}, |
|
{ |
|
"epoch": 12.08, |
|
"learning_rate": 8.054237288135592e-07, |
|
"loss": 0.0579, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 12.09, |
|
"learning_rate": 8.047457627118644e-07, |
|
"loss": 0.0385, |
|
"step": 1813 |
|
}, |
|
{ |
|
"epoch": 12.09, |
|
"learning_rate": 8.040677966101694e-07, |
|
"loss": 0.209, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 12.1, |
|
"learning_rate": 8.033898305084746e-07, |
|
"loss": 0.0209, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 12.11, |
|
"learning_rate": 8.027118644067796e-07, |
|
"loss": 0.0451, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 12.11, |
|
"learning_rate": 8.020338983050847e-07, |
|
"loss": 0.0444, |
|
"step": 1817 |
|
}, |
|
{ |
|
"epoch": 12.12, |
|
"learning_rate": 8.013559322033899e-07, |
|
"loss": 0.0255, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 12.13, |
|
"learning_rate": 8.006779661016949e-07, |
|
"loss": 0.1247, |
|
"step": 1819 |
|
}, |
|
{ |
|
"epoch": 12.13, |
|
"learning_rate": 8e-07, |
|
"loss": 0.176, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 12.14, |
|
"learning_rate": 7.993220338983051e-07, |
|
"loss": 0.0581, |
|
"step": 1821 |
|
}, |
|
{ |
|
"epoch": 12.15, |
|
"learning_rate": 7.986440677966101e-07, |
|
"loss": 0.1404, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 12.15, |
|
"learning_rate": 7.979661016949152e-07, |
|
"loss": 0.1071, |
|
"step": 1823 |
|
}, |
|
{ |
|
"epoch": 12.16, |
|
"learning_rate": 7.972881355932203e-07, |
|
"loss": 0.0824, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 12.17, |
|
"learning_rate": 7.966101694915253e-07, |
|
"loss": 0.0981, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 12.17, |
|
"learning_rate": 7.959322033898305e-07, |
|
"loss": 0.0582, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 12.18, |
|
"learning_rate": 7.952542372881356e-07, |
|
"loss": 0.0143, |
|
"step": 1827 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"learning_rate": 7.945762711864406e-07, |
|
"loss": 0.0426, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"learning_rate": 7.938983050847458e-07, |
|
"loss": 0.1532, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 12.2, |
|
"learning_rate": 7.932203389830508e-07, |
|
"loss": 0.0176, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"learning_rate": 7.92542372881356e-07, |
|
"loss": 0.0448, |
|
"step": 1831 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"learning_rate": 7.918644067796609e-07, |
|
"loss": 0.0358, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 12.22, |
|
"learning_rate": 7.91186440677966e-07, |
|
"loss": 0.0301, |
|
"step": 1833 |
|
}, |
|
{ |
|
"epoch": 12.23, |
|
"learning_rate": 7.905084745762712e-07, |
|
"loss": 0.025, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 12.23, |
|
"learning_rate": 7.898305084745762e-07, |
|
"loss": 0.0306, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 7.891525423728813e-07, |
|
"loss": 0.0499, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 12.25, |
|
"learning_rate": 7.884745762711864e-07, |
|
"loss": 0.0214, |
|
"step": 1837 |
|
}, |
|
{ |
|
"epoch": 12.25, |
|
"learning_rate": 7.877966101694915e-07, |
|
"loss": 0.0269, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 12.26, |
|
"learning_rate": 7.871186440677967e-07, |
|
"loss": 0.1555, |
|
"step": 1839 |
|
}, |
|
{ |
|
"epoch": 12.27, |
|
"learning_rate": 7.864406779661017e-07, |
|
"loss": 0.0176, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 12.27, |
|
"learning_rate": 7.857627118644068e-07, |
|
"loss": 0.0696, |
|
"step": 1841 |
|
}, |
|
{ |
|
"epoch": 12.28, |
|
"learning_rate": 7.850847457627118e-07, |
|
"loss": 0.0516, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 12.29, |
|
"learning_rate": 7.844067796610169e-07, |
|
"loss": 0.2515, |
|
"step": 1843 |
|
}, |
|
{ |
|
"epoch": 12.29, |
|
"learning_rate": 7.837288135593219e-07, |
|
"loss": 0.0157, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 12.3, |
|
"learning_rate": 7.830508474576271e-07, |
|
"loss": 0.3365, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"learning_rate": 7.823728813559321e-07, |
|
"loss": 0.0659, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"learning_rate": 7.816949152542373e-07, |
|
"loss": 0.2075, |
|
"step": 1847 |
|
}, |
|
{ |
|
"epoch": 12.32, |
|
"learning_rate": 7.810169491525424e-07, |
|
"loss": 0.0214, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"learning_rate": 7.803389830508474e-07, |
|
"loss": 0.0191, |
|
"step": 1849 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"learning_rate": 7.796610169491526e-07, |
|
"loss": 0.0968, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"learning_rate": 7.789830508474576e-07, |
|
"loss": 0.0258, |
|
"step": 1851 |
|
}, |
|
{ |
|
"epoch": 12.35, |
|
"learning_rate": 7.783050847457627e-07, |
|
"loss": 0.0192, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 12.35, |
|
"learning_rate": 7.776271186440677e-07, |
|
"loss": 0.0697, |
|
"step": 1853 |
|
}, |
|
{ |
|
"epoch": 12.36, |
|
"learning_rate": 7.769491525423728e-07, |
|
"loss": 0.0173, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"learning_rate": 7.762711864406779e-07, |
|
"loss": 0.0179, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"learning_rate": 7.75593220338983e-07, |
|
"loss": 0.1792, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 12.38, |
|
"learning_rate": 7.749152542372881e-07, |
|
"loss": 0.0428, |
|
"step": 1857 |
|
}, |
|
{ |
|
"epoch": 12.39, |
|
"learning_rate": 7.742372881355932e-07, |
|
"loss": 0.1118, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 12.39, |
|
"learning_rate": 7.735593220338983e-07, |
|
"loss": 0.0186, |
|
"step": 1859 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"learning_rate": 7.728813559322033e-07, |
|
"loss": 0.0226, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 12.41, |
|
"learning_rate": 7.722033898305085e-07, |
|
"loss": 0.0414, |
|
"step": 1861 |
|
}, |
|
{ |
|
"epoch": 12.41, |
|
"learning_rate": 7.715254237288136e-07, |
|
"loss": 0.018, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 12.42, |
|
"learning_rate": 7.708474576271185e-07, |
|
"loss": 0.0611, |
|
"step": 1863 |
|
}, |
|
{ |
|
"epoch": 12.43, |
|
"learning_rate": 7.701694915254237e-07, |
|
"loss": 0.0439, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 12.43, |
|
"learning_rate": 7.694915254237287e-07, |
|
"loss": 0.1583, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 12.44, |
|
"learning_rate": 7.688135593220339e-07, |
|
"loss": 0.0423, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"learning_rate": 7.681355932203389e-07, |
|
"loss": 0.1553, |
|
"step": 1867 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"learning_rate": 7.67457627118644e-07, |
|
"loss": 0.0157, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"learning_rate": 7.667796610169492e-07, |
|
"loss": 0.1468, |
|
"step": 1869 |
|
}, |
|
{ |
|
"epoch": 12.47, |
|
"learning_rate": 7.661016949152542e-07, |
|
"loss": 0.1471, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 12.47, |
|
"learning_rate": 7.654237288135594e-07, |
|
"loss": 0.0795, |
|
"step": 1871 |
|
}, |
|
{ |
|
"epoch": 12.48, |
|
"learning_rate": 7.647457627118644e-07, |
|
"loss": 0.02, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"learning_rate": 7.640677966101694e-07, |
|
"loss": 0.1232, |
|
"step": 1873 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"learning_rate": 7.633898305084745e-07, |
|
"loss": 0.1155, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"learning_rate": 7.627118644067796e-07, |
|
"loss": 0.0198, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 12.51, |
|
"learning_rate": 7.620338983050847e-07, |
|
"loss": 0.1619, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 12.51, |
|
"learning_rate": 7.613559322033898e-07, |
|
"loss": 0.2416, |
|
"step": 1877 |
|
}, |
|
{ |
|
"epoch": 12.52, |
|
"learning_rate": 7.606779661016949e-07, |
|
"loss": 0.0857, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 12.53, |
|
"learning_rate": 7.599999999999999e-07, |
|
"loss": 0.1299, |
|
"step": 1879 |
|
}, |
|
{ |
|
"epoch": 12.53, |
|
"learning_rate": 7.593220338983051e-07, |
|
"loss": 0.0285, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 12.54, |
|
"learning_rate": 7.586440677966101e-07, |
|
"loss": 0.0767, |
|
"step": 1881 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"learning_rate": 7.579661016949153e-07, |
|
"loss": 0.0193, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"learning_rate": 7.572881355932204e-07, |
|
"loss": 0.0185, |
|
"step": 1883 |
|
}, |
|
{ |
|
"epoch": 12.56, |
|
"learning_rate": 7.566101694915253e-07, |
|
"loss": 0.0485, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 12.57, |
|
"learning_rate": 7.559322033898305e-07, |
|
"loss": 0.1179, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 12.57, |
|
"learning_rate": 7.552542372881355e-07, |
|
"loss": 0.1148, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 12.58, |
|
"learning_rate": 7.545762711864406e-07, |
|
"loss": 0.0195, |
|
"step": 1887 |
|
}, |
|
{ |
|
"epoch": 12.59, |
|
"learning_rate": 7.538983050847457e-07, |
|
"loss": 0.0834, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 12.59, |
|
"learning_rate": 7.532203389830508e-07, |
|
"loss": 0.0482, |
|
"step": 1889 |
|
}, |
|
{ |
|
"epoch": 12.6, |
|
"learning_rate": 7.52542372881356e-07, |
|
"loss": 0.0625, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 7.51864406779661e-07, |
|
"loss": 0.0235, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 7.511864406779661e-07, |
|
"loss": 0.0796, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 12.62, |
|
"learning_rate": 7.505084745762712e-07, |
|
"loss": 0.0281, |
|
"step": 1893 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"learning_rate": 7.498305084745762e-07, |
|
"loss": 0.1491, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"learning_rate": 7.491525423728812e-07, |
|
"loss": 0.0249, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 12.64, |
|
"learning_rate": 7.484745762711864e-07, |
|
"loss": 0.0944, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 12.65, |
|
"learning_rate": 7.477966101694914e-07, |
|
"loss": 0.1889, |
|
"step": 1897 |
|
}, |
|
{ |
|
"epoch": 12.65, |
|
"learning_rate": 7.471186440677966e-07, |
|
"loss": 0.0392, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 12.66, |
|
"learning_rate": 7.464406779661017e-07, |
|
"loss": 0.1033, |
|
"step": 1899 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"learning_rate": 7.457627118644067e-07, |
|
"loss": 0.0419, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"learning_rate": 7.450847457627119e-07, |
|
"loss": 0.1089, |
|
"step": 1901 |
|
}, |
|
{ |
|
"epoch": 12.68, |
|
"learning_rate": 7.444067796610169e-07, |
|
"loss": 0.0267, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 12.69, |
|
"learning_rate": 7.43728813559322e-07, |
|
"loss": 0.079, |
|
"step": 1903 |
|
}, |
|
{ |
|
"epoch": 12.69, |
|
"learning_rate": 7.430508474576272e-07, |
|
"loss": 0.0689, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"learning_rate": 7.423728813559321e-07, |
|
"loss": 0.1049, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 12.71, |
|
"learning_rate": 7.416949152542373e-07, |
|
"loss": 0.0277, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 12.71, |
|
"learning_rate": 7.410169491525423e-07, |
|
"loss": 0.0506, |
|
"step": 1907 |
|
}, |
|
{ |
|
"epoch": 12.72, |
|
"learning_rate": 7.403389830508474e-07, |
|
"loss": 0.1173, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 12.73, |
|
"learning_rate": 7.396610169491525e-07, |
|
"loss": 0.0162, |
|
"step": 1909 |
|
}, |
|
{ |
|
"epoch": 12.73, |
|
"learning_rate": 7.389830508474576e-07, |
|
"loss": 0.0229, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 12.74, |
|
"learning_rate": 7.383050847457626e-07, |
|
"loss": 0.0869, |
|
"step": 1911 |
|
}, |
|
{ |
|
"epoch": 12.75, |
|
"learning_rate": 7.376271186440678e-07, |
|
"loss": 0.033, |
|
"step": 1912 |
|
}, |
|
{ |
|
"epoch": 12.75, |
|
"learning_rate": 7.369491525423729e-07, |
|
"loss": 0.0501, |
|
"step": 1913 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"learning_rate": 7.36271186440678e-07, |
|
"loss": 0.0581, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 12.77, |
|
"learning_rate": 7.35593220338983e-07, |
|
"loss": 0.0955, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 12.77, |
|
"learning_rate": 7.34915254237288e-07, |
|
"loss": 0.1402, |
|
"step": 1916 |
|
}, |
|
{ |
|
"epoch": 12.78, |
|
"learning_rate": 7.342372881355932e-07, |
|
"loss": 0.0487, |
|
"step": 1917 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"learning_rate": 7.335593220338982e-07, |
|
"loss": 0.1935, |
|
"step": 1918 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"learning_rate": 7.328813559322033e-07, |
|
"loss": 0.1322, |
|
"step": 1919 |
|
}, |
|
{ |
|
"epoch": 12.8, |
|
"learning_rate": 7.322033898305085e-07, |
|
"loss": 0.0488, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"learning_rate": 7.315254237288135e-07, |
|
"loss": 0.0133, |
|
"step": 1921 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"learning_rate": 7.308474576271187e-07, |
|
"loss": 0.0185, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 12.82, |
|
"learning_rate": 7.301694915254237e-07, |
|
"loss": 0.0196, |
|
"step": 1923 |
|
}, |
|
{ |
|
"epoch": 12.83, |
|
"learning_rate": 7.294915254237288e-07, |
|
"loss": 0.0639, |
|
"step": 1924 |
|
}, |
|
{ |
|
"epoch": 12.83, |
|
"learning_rate": 7.288135593220338e-07, |
|
"loss": 0.1526, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"learning_rate": 7.281355932203389e-07, |
|
"loss": 0.1469, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 12.85, |
|
"learning_rate": 7.27457627118644e-07, |
|
"loss": 0.0621, |
|
"step": 1927 |
|
}, |
|
{ |
|
"epoch": 12.85, |
|
"learning_rate": 7.267796610169491e-07, |
|
"loss": 0.1961, |
|
"step": 1928 |
|
}, |
|
{ |
|
"epoch": 12.86, |
|
"learning_rate": 7.261016949152542e-07, |
|
"loss": 0.0304, |
|
"step": 1929 |
|
}, |
|
{ |
|
"epoch": 12.87, |
|
"learning_rate": 7.254237288135593e-07, |
|
"loss": 0.016, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 12.87, |
|
"learning_rate": 7.247457627118644e-07, |
|
"loss": 0.0129, |
|
"step": 1931 |
|
}, |
|
{ |
|
"epoch": 12.88, |
|
"learning_rate": 7.240677966101694e-07, |
|
"loss": 0.0342, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 12.89, |
|
"learning_rate": 7.233898305084746e-07, |
|
"loss": 0.1346, |
|
"step": 1933 |
|
}, |
|
{ |
|
"epoch": 12.89, |
|
"learning_rate": 7.227118644067797e-07, |
|
"loss": 0.0674, |
|
"step": 1934 |
|
}, |
|
{ |
|
"epoch": 12.9, |
|
"learning_rate": 7.220338983050847e-07, |
|
"loss": 0.1377, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 7.213559322033898e-07, |
|
"loss": 0.0353, |
|
"step": 1936 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 7.206779661016948e-07, |
|
"loss": 0.083, |
|
"step": 1937 |
|
}, |
|
{ |
|
"epoch": 12.92, |
|
"learning_rate": 7.2e-07, |
|
"loss": 0.1333, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"learning_rate": 7.19322033898305e-07, |
|
"loss": 0.0248, |
|
"step": 1939 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"learning_rate": 7.186440677966101e-07, |
|
"loss": 0.0488, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 12.94, |
|
"learning_rate": 7.179661016949153e-07, |
|
"loss": 0.0335, |
|
"step": 1941 |
|
}, |
|
{ |
|
"epoch": 12.95, |
|
"learning_rate": 7.172881355932203e-07, |
|
"loss": 0.0201, |
|
"step": 1942 |
|
}, |
|
{ |
|
"epoch": 12.95, |
|
"learning_rate": 7.166101694915254e-07, |
|
"loss": 0.0566, |
|
"step": 1943 |
|
}, |
|
{ |
|
"epoch": 12.96, |
|
"learning_rate": 7.159322033898305e-07, |
|
"loss": 0.0592, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"learning_rate": 7.152542372881356e-07, |
|
"loss": 0.1423, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"learning_rate": 7.145762711864406e-07, |
|
"loss": 0.0194, |
|
"step": 1946 |
|
}, |
|
{ |
|
"epoch": 12.98, |
|
"learning_rate": 7.138983050847457e-07, |
|
"loss": 0.1888, |
|
"step": 1947 |
|
}, |
|
{ |
|
"epoch": 12.99, |
|
"learning_rate": 7.132203389830508e-07, |
|
"loss": 0.0184, |
|
"step": 1948 |
|
}, |
|
{ |
|
"epoch": 12.99, |
|
"learning_rate": 7.125423728813559e-07, |
|
"loss": 0.1541, |
|
"step": 1949 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"learning_rate": 7.11864406779661e-07, |
|
"loss": 0.0269, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_accuracy": 0.9533333333333334, |
|
"eval_loss": 0.13513699173927307, |
|
"eval_runtime": 2.1693, |
|
"eval_samples_per_second": 553.18, |
|
"eval_steps_per_second": 4.61, |
|
"step": 1950 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3000, |
|
"num_train_epochs": 20, |
|
"save_steps": 500, |
|
"total_flos": 1664093699597952.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|