{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 126582,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.9802499565499043e-05,
      "loss": 2.8425,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.960499913099809e-05,
      "loss": 2.4736,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.940749869649713e-05,
      "loss": 2.298,
      "step": 1500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9209998261996174e-05,
      "loss": 2.235,
      "step": 2000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.901249782749522e-05,
      "loss": 2.1605,
      "step": 2500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.881499739299427e-05,
      "loss": 2.083,
      "step": 3000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.861749695849331e-05,
      "loss": 2.0588,
      "step": 3500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.841999652399236e-05,
      "loss": 2.0555,
      "step": 4000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.82224960894914e-05,
      "loss": 2.0182,
      "step": 4500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.802499565499045e-05,
      "loss": 1.9641,
      "step": 5000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.782749522048949e-05,
      "loss": 1.9383,
      "step": 5500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.762999478598853e-05,
      "loss": 1.9275,
      "step": 6000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.743249435148758e-05,
      "loss": 1.8923,
      "step": 6500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.723499391698662e-05,
      "loss": 1.8957,
      "step": 7000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.703749348248566e-05,
      "loss": 1.8787,
      "step": 7500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.683999304798471e-05,
      "loss": 1.8516,
      "step": 8000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.664249261348375e-05,
      "loss": 1.8373,
      "step": 8500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6444992178982796e-05,
      "loss": 1.7909,
      "step": 9000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.6247491744481844e-05,
      "loss": 1.8034,
      "step": 9500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.6049991309980885e-05,
      "loss": 1.8101,
      "step": 10000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.585249087547993e-05,
      "loss": 1.7702,
      "step": 10500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5654990440978974e-05,
      "loss": 1.7727,
      "step": 11000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.5457490006478016e-05,
      "loss": 1.7602,
      "step": 11500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5259989571977063e-05,
      "loss": 1.761,
      "step": 12000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.5062489137476105e-05,
      "loss": 1.7219,
      "step": 12500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.4864988702975146e-05,
      "loss": 1.7497,
      "step": 13000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4667488268474193e-05,
      "loss": 1.7244,
      "step": 13500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4469987833973235e-05,
      "loss": 1.7144,
      "step": 14000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4272487399472276e-05,
      "loss": 1.6983,
      "step": 14500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.4074986964971324e-05,
      "loss": 1.6779,
      "step": 15000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.387748653047037e-05,
      "loss": 1.676,
      "step": 15500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.367998609596941e-05,
      "loss": 1.6737,
      "step": 16000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.348248566146846e-05,
      "loss": 1.6605,
      "step": 16500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.32849852269675e-05,
      "loss": 1.6591,
      "step": 17000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.308748479246655e-05,
      "loss": 1.6598,
      "step": 17500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.288998435796559e-05,
      "loss": 1.6579,
      "step": 18000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.269248392346463e-05,
      "loss": 1.6587,
      "step": 18500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.249498348896368e-05,
      "loss": 1.65,
      "step": 19000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.229748305446272e-05,
      "loss": 1.6399,
      "step": 19500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.209998261996176e-05,
      "loss": 1.6409,
      "step": 20000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.190248218546081e-05,
      "loss": 1.6167,
      "step": 20500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.170498175095985e-05,
      "loss": 1.6468,
      "step": 21000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.15074813164589e-05,
      "loss": 1.6084,
      "step": 21500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.1309980881957946e-05,
      "loss": 1.6214,
      "step": 22000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.111248044745699e-05,
      "loss": 1.6254,
      "step": 22500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.091498001295603e-05,
      "loss": 1.5892,
      "step": 23000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0717479578455077e-05,
      "loss": 1.5746,
      "step": 23500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.051997914395412e-05,
      "loss": 1.5701,
      "step": 24000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.0322478709453166e-05,
      "loss": 1.6088,
      "step": 24500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.012497827495221e-05,
      "loss": 1.5952,
      "step": 25000
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.992747784045125e-05,
      "loss": 1.5561,
      "step": 25500
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9729977405950296e-05,
      "loss": 1.5707,
      "step": 26000
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.953247697144934e-05,
      "loss": 1.5582,
      "step": 26500
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.9334976536948385e-05,
      "loss": 1.5612,
      "step": 27000
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.9137476102447426e-05,
      "loss": 1.5552,
      "step": 27500
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.8939975667946474e-05,
      "loss": 1.5632,
      "step": 28000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8742475233445515e-05,
      "loss": 1.538,
      "step": 28500
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.854497479894456e-05,
      "loss": 1.5257,
      "step": 29000
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.8347474364443604e-05,
      "loss": 1.5355,
      "step": 29500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8149973929942645e-05,
      "loss": 1.5233,
      "step": 30000
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.795247349544169e-05,
      "loss": 1.5154,
      "step": 30500
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.7754973060940734e-05,
      "loss": 1.5493,
      "step": 31000
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.755747262643978e-05,
      "loss": 1.5327,
      "step": 31500
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.735997219193882e-05,
      "loss": 1.5255,
      "step": 32000
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.7162471757437864e-05,
      "loss": 1.51,
      "step": 32500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.696497132293691e-05,
      "loss": 1.5122,
      "step": 33000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.676747088843596e-05,
      "loss": 1.5006,
      "step": 33500
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.6569970453935e-05,
      "loss": 1.5106,
      "step": 34000
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.637247001943405e-05,
      "loss": 1.5031,
      "step": 34500
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.617496958493309e-05,
      "loss": 1.5024,
      "step": 35000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.597746915043213e-05,
      "loss": 1.5231,
      "step": 35500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.577996871593118e-05,
      "loss": 1.5271,
      "step": 36000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.558246828143022e-05,
      "loss": 1.508,
      "step": 36500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.538496784692926e-05,
      "loss": 1.4859,
      "step": 37000
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.518746741242831e-05,
      "loss": 1.4864,
      "step": 37500
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.498996697792735e-05,
      "loss": 1.5045,
      "step": 38000
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.47924665434264e-05,
      "loss": 1.4773,
      "step": 38500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.459496610892544e-05,
      "loss": 1.4704,
      "step": 39000
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.439746567442449e-05,
      "loss": 1.4816,
      "step": 39500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.4199965239923535e-05,
      "loss": 1.4899,
      "step": 40000
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.4002464805422576e-05,
      "loss": 1.463,
      "step": 40500
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.380496437092162e-05,
      "loss": 1.4847,
      "step": 41000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3607463936420665e-05,
      "loss": 1.478,
      "step": 41500
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.3409963501919706e-05,
      "loss": 1.4617,
      "step": 42000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.321246306741875e-05,
      "loss": 1.3742,
      "step": 42500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.3014962632917795e-05,
      "loss": 1.3036,
      "step": 43000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.2817462198416836e-05,
      "loss": 1.3172,
      "step": 43500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.261996176391588e-05,
      "loss": 1.2944,
      "step": 44000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.2422461329414925e-05,
      "loss": 1.319,
      "step": 44500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.2224960894913966e-05,
      "loss": 1.3015,
      "step": 45000
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.2027460460413014e-05,
      "loss": 1.3011,
      "step": 45500
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.182996002591206e-05,
      "loss": 1.295,
      "step": 46000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.16324595914111e-05,
      "loss": 1.2994,
      "step": 46500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.143495915691015e-05,
      "loss": 1.3215,
      "step": 47000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.123745872240919e-05,
      "loss": 1.3197,
      "step": 47500
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.103995828790823e-05,
      "loss": 1.3114,
      "step": 48000
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.084245785340728e-05,
      "loss": 1.2988,
      "step": 48500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.064495741890632e-05,
      "loss": 1.3004,
      "step": 49000
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0447456984405366e-05,
      "loss": 1.275,
      "step": 49500
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.024995654990441e-05,
      "loss": 1.2822,
      "step": 50000
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.0052456115403455e-05,
      "loss": 1.3113,
      "step": 50500
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9854955680902503e-05,
      "loss": 1.3087,
      "step": 51000
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9657455246401544e-05,
      "loss": 1.3039,
      "step": 51500
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9459954811900585e-05,
      "loss": 1.2772,
      "step": 52000
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9262454377399633e-05,
      "loss": 1.2918,
      "step": 52500
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.9064953942898678e-05,
      "loss": 1.2815,
      "step": 53000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.886745350839772e-05,
      "loss": 1.3004,
      "step": 53500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8669953073896767e-05,
      "loss": 1.2761,
      "step": 54000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8472452639395808e-05,
      "loss": 1.2927,
      "step": 54500
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.827495220489485e-05,
      "loss": 1.2815,
      "step": 55000
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.8077451770393897e-05,
      "loss": 1.27,
      "step": 55500
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.787995133589294e-05,
      "loss": 1.2928,
      "step": 56000
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7682450901391982e-05,
      "loss": 1.2752,
      "step": 56500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.748495046689103e-05,
      "loss": 1.2935,
      "step": 57000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.728745003239007e-05,
      "loss": 1.2794,
      "step": 57500
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.708994959788912e-05,
      "loss": 1.2956,
      "step": 58000
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.689244916338816e-05,
      "loss": 1.2856,
      "step": 58500
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6694948728887205e-05,
      "loss": 1.2743,
      "step": 59000
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6497448294386253e-05,
      "loss": 1.2743,
      "step": 59500
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6299947859885294e-05,
      "loss": 1.2753,
      "step": 60000
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6102447425384335e-05,
      "loss": 1.2653,
      "step": 60500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5904946990883383e-05,
      "loss": 1.2778,
      "step": 61000
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5707446556382424e-05,
      "loss": 1.2726,
      "step": 61500
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.550994612188147e-05,
      "loss": 1.2773,
      "step": 62000
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5312445687380516e-05,
      "loss": 1.2632,
      "step": 62500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5114945252879557e-05,
      "loss": 1.2751,
      "step": 63000
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.4917444818378602e-05,
      "loss": 1.2607,
      "step": 63500
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4719944383877646e-05,
      "loss": 1.2556,
      "step": 64000
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4522443949376688e-05,
      "loss": 1.235,
      "step": 64500
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4324943514875735e-05,
      "loss": 1.2502,
      "step": 65000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.412744308037478e-05,
      "loss": 1.2633,
      "step": 65500
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.392994264587382e-05,
      "loss": 1.2558,
      "step": 66000
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3732442211372865e-05,
      "loss": 1.2659,
      "step": 66500
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.353494177687191e-05,
      "loss": 1.258,
      "step": 67000
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3337441342370954e-05,
      "loss": 1.2609,
      "step": 67500
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.313994090787e-05,
      "loss": 1.2485,
      "step": 68000
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.2942440473369043e-05,
      "loss": 1.2563,
      "step": 68500
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2744940038868088e-05,
      "loss": 1.2621,
      "step": 69000
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.254743960436713e-05,
      "loss": 1.246,
      "step": 69500
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2349939169866174e-05,
      "loss": 1.2327,
      "step": 70000
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2152438735365218e-05,
      "loss": 1.2388,
      "step": 70500
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.1954938300864263e-05,
      "loss": 1.2443,
      "step": 71000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1757437866363307e-05,
      "loss": 1.2585,
      "step": 71500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.155993743186235e-05,
      "loss": 1.2376,
      "step": 72000
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1362436997361396e-05,
      "loss": 1.2317,
      "step": 72500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.116493656286044e-05,
      "loss": 1.2279,
      "step": 73000
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.096743612835948e-05,
      "loss": 1.2444,
      "step": 73500
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.0769935693858526e-05,
      "loss": 1.2488,
      "step": 74000
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0572435259357574e-05,
      "loss": 1.2314,
      "step": 74500
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0374934824856615e-05,
      "loss": 1.2385,
      "step": 75000
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.017743439035566e-05,
      "loss": 1.2219,
      "step": 75500
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.9979933955854704e-05,
      "loss": 1.2491,
      "step": 76000
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.978243352135375e-05,
      "loss": 1.229,
      "step": 76500
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9584933086852793e-05,
      "loss": 1.2233,
      "step": 77000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9387432652351838e-05,
      "loss": 1.2521,
      "step": 77500
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9189932217850882e-05,
      "loss": 1.2214,
      "step": 78000
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.8992431783349923e-05,
      "loss": 1.2162,
      "step": 78500
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8794931348848968e-05,
      "loss": 1.2178,
      "step": 79000
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8597430914348012e-05,
      "loss": 1.2418,
      "step": 79500
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8399930479847057e-05,
      "loss": 1.2453,
      "step": 80000
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.82024300453461e-05,
      "loss": 1.228,
      "step": 80500
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.8004929610845146e-05,
      "loss": 1.2036,
      "step": 81000
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.780742917634419e-05,
      "loss": 1.2078,
      "step": 81500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.760992874184323e-05,
      "loss": 1.2106,
      "step": 82000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7412428307342276e-05,
      "loss": 1.2187,
      "step": 82500
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.721492787284132e-05,
      "loss": 1.2267,
      "step": 83000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.7017427438340368e-05,
      "loss": 1.2262,
      "step": 83500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.681992700383941e-05,
      "loss": 1.2185,
      "step": 84000
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6622426569338454e-05,
      "loss": 1.1781,
      "step": 84500
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6424926134837498e-05,
      "loss": 1.0483,
      "step": 85000
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.622742570033654e-05,
      "loss": 1.0659,
      "step": 85500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.6029925265835584e-05,
      "loss": 1.0643,
      "step": 86000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.583242483133463e-05,
      "loss": 1.0453,
      "step": 86500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5634924396833676e-05,
      "loss": 1.0541,
      "step": 87000
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5437423962332717e-05,
      "loss": 1.0534,
      "step": 87500
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5239923527831762e-05,
      "loss": 1.0641,
      "step": 88000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5042423093330806e-05,
      "loss": 1.0771,
      "step": 88500
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4844922658829849e-05,
      "loss": 1.0638,
      "step": 89000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4647422224328893e-05,
      "loss": 1.0677,
      "step": 89500
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4449921789827938e-05,
      "loss": 1.0776,
      "step": 90000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4252421355326984e-05,
      "loss": 1.064,
      "step": 90500
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.4054920920826025e-05,
      "loss": 1.0548,
      "step": 91000
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3857420486325071e-05,
      "loss": 1.0606,
      "step": 91500
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3659920051824116e-05,
      "loss": 1.0699,
      "step": 92000
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3462419617323157e-05,
      "loss": 1.0721,
      "step": 92500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3264919182822203e-05,
      "loss": 1.0438,
      "step": 93000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3067418748321248e-05,
      "loss": 1.0425,
      "step": 93500
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2869918313820292e-05,
      "loss": 1.0501,
      "step": 94000
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2672417879319335e-05,
      "loss": 1.0307,
      "step": 94500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.247491744481838e-05,
      "loss": 1.037,
      "step": 95000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2277417010317422e-05,
      "loss": 1.0401,
      "step": 95500
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.2079916575816467e-05,
      "loss": 1.0504,
      "step": 96000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1882416141315511e-05,
      "loss": 1.0381,
      "step": 96500
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1684915706814556e-05,
      "loss": 1.0302,
      "step": 97000
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.14874152723136e-05,
      "loss": 1.0474,
      "step": 97500
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1289914837812645e-05,
      "loss": 1.0454,
      "step": 98000
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1092414403311688e-05,
      "loss": 1.0531,
      "step": 98500
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0894913968810732e-05,
      "loss": 1.0448,
      "step": 99000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0697413534309776e-05,
      "loss": 1.0562,
      "step": 99500
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.049991309980882e-05,
      "loss": 1.0432,
      "step": 100000
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0302412665307864e-05,
      "loss": 1.0436,
      "step": 100500
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0104912230806908e-05,
      "loss": 1.0419,
      "step": 101000
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.907411796305953e-06,
      "loss": 1.0457,
      "step": 101500
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.709911361804996e-06,
      "loss": 1.0409,
      "step": 102000
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.51241092730404e-06,
      "loss": 1.031,
      "step": 102500
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.314910492803085e-06,
      "loss": 1.0356,
      "step": 103000
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.117410058302129e-06,
      "loss": 1.0606,
      "step": 103500
    },
    {
      "epoch": 2.46,
      "learning_rate": 8.919909623801174e-06,
      "loss": 1.0432,
      "step": 104000
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.722409189300216e-06,
      "loss": 1.0254,
      "step": 104500
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.52490875479926e-06,
      "loss": 1.0482,
      "step": 105000
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.327408320298305e-06,
      "loss": 1.0522,
      "step": 105500
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.129907885797348e-06,
      "loss": 1.0263,
      "step": 106000
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.932407451296393e-06,
      "loss": 1.0546,
      "step": 106500
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.734907016795437e-06,
      "loss": 1.0475,
      "step": 107000
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.5374065822944816e-06,
      "loss": 1.0293,
      "step": 107500
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.339906147793525e-06,
      "loss": 1.0497,
      "step": 108000
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.14240571329257e-06,
      "loss": 1.0448,
      "step": 108500
    },
    {
      "epoch": 2.58,
      "learning_rate": 6.944905278791613e-06,
      "loss": 1.0275,
      "step": 109000
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.747404844290659e-06,
      "loss": 1.0281,
      "step": 109500
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.549904409789702e-06,
      "loss": 1.0295,
      "step": 110000
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.352403975288745e-06,
      "loss": 1.0143,
      "step": 110500
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.1549035407877905e-06,
      "loss": 1.0277,
      "step": 111000
    },
    {
      "epoch": 2.64,
      "learning_rate": 5.957403106286834e-06,
      "loss": 1.0266,
      "step": 111500
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.759902671785878e-06,
      "loss": 1.0502,
      "step": 112000
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.562402237284922e-06,
      "loss": 1.0312,
      "step": 112500
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.364901802783967e-06,
      "loss": 1.0444,
      "step": 113000
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.16740136828301e-06,
      "loss": 1.0257,
      "step": 113500
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.969900933782055e-06,
      "loss": 1.0199,
      "step": 114000
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.7724004992810985e-06,
      "loss": 1.0178,
      "step": 114500
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.574900064780143e-06,
      "loss": 1.0197,
      "step": 115000
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.377399630279187e-06,
      "loss": 1.0181,
      "step": 115500
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.179899195778231e-06,
      "loss": 1.0492,
      "step": 116000
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.982398761277275e-06,
      "loss": 1.035,
      "step": 116500
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.7848983267763193e-06,
      "loss": 1.0256,
      "step": 117000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.5873978922753633e-06,
      "loss": 1.0031,
      "step": 117500
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.3898974577744074e-06,
      "loss": 1.0208,
      "step": 118000
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.1923970232734515e-06,
      "loss": 1.0215,
      "step": 118500
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.9948965887724956e-06,
      "loss": 1.0196,
      "step": 119000
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.7973961542715396e-06,
      "loss": 1.0366,
      "step": 119500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.5998957197705837e-06,
      "loss": 1.0268,
      "step": 120000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.4023952852696278e-06,
      "loss": 1.0141,
      "step": 120500
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.204894850768672e-06,
      "loss": 1.0083,
      "step": 121000
    },
    {
      "epoch": 2.88,
      "learning_rate": 2.007394416267716e-06,
      "loss": 1.0266,
      "step": 121500
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.80989398176676e-06,
      "loss": 1.0216,
      "step": 122000
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.612393547265804e-06,
      "loss": 1.0168,
      "step": 122500
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.414893112764848e-06,
      "loss": 1.0004,
      "step": 123000
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.2173926782638924e-06,
      "loss": 1.026,
      "step": 123500
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.0198922437629362e-06,
      "loss": 1.0042,
      "step": 124000
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.223918092619805e-07,
      "loss": 0.9972,
      "step": 124500
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.248913747610245e-07,
      "loss": 1.0221,
      "step": 125000
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.273909402600686e-07,
      "loss": 1.02,
      "step": 125500
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.2989050575911268e-07,
      "loss": 1.0402,
      "step": 126000
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.239007125815677e-08,
      "loss": 1.0188,
      "step": 126500
    },
    {
      "epoch": 3.0,
      "step": 126582,
      "total_flos": 1.9038716367888384e+17,
      "train_loss": 1.3316941024496511,
      "train_runtime": 32427.393,
      "train_samples_per_second": 39.035,
      "train_steps_per_second": 3.904
    }
  ],
  "max_steps": 126582,
  "num_train_epochs": 3,
  "total_flos": 1.9038716367888384e+17,
  "trial_name": null,
  "trial_params": null
}