{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.0, |
|
"global_step": 239755, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9895726887864694e-05, |
|
"loss": 2.0097, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.979145377572939e-05, |
|
"loss": 1.8204, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.968718066359409e-05, |
|
"loss": 1.6177, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.958290755145878e-05, |
|
"loss": 1.543, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.947863443932348e-05, |
|
"loss": 1.5073, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.937436132718817e-05, |
|
"loss": 1.4273, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9270088215052865e-05, |
|
"loss": 1.3995, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.916581510291756e-05, |
|
"loss": 1.2902, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.906154199078226e-05, |
|
"loss": 1.2541, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.895726887864695e-05, |
|
"loss": 1.1981, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.885299576651165e-05, |
|
"loss": 1.1933, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.8748722654376343e-05, |
|
"loss": 1.1827, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.8644449542241035e-05, |
|
"loss": 1.1112, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.8540176430105734e-05, |
|
"loss": 1.1385, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.843590331797043e-05, |
|
"loss": 1.0471, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.8331630205835124e-05, |
|
"loss": 1.0287, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.822735709369982e-05, |
|
"loss": 1.0343, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.8123083981564514e-05, |
|
"loss": 1.0191, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.801881086942921e-05, |
|
"loss": 1.0007, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.791453775729391e-05, |
|
"loss": 0.9207, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.78102646451586e-05, |
|
"loss": 0.9356, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.7705991533023294e-05, |
|
"loss": 0.8999, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.760171842088799e-05, |
|
"loss": 0.9197, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.7497445308752684e-05, |
|
"loss": 0.8851, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.739317219661738e-05, |
|
"loss": 0.8467, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.728889908448208e-05, |
|
"loss": 0.8545, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.718462597234677e-05, |
|
"loss": 0.844, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.708035286021147e-05, |
|
"loss": 0.8036, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.697607974807616e-05, |
|
"loss": 0.7726, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.6871806635940855e-05, |
|
"loss": 0.8192, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.676753352380555e-05, |
|
"loss": 0.786, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.666326041167025e-05, |
|
"loss": 0.7673, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.6558987299534944e-05, |
|
"loss": 0.7822, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.645471418739964e-05, |
|
"loss": 0.7499, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.6350441075264334e-05, |
|
"loss": 0.7388, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.6246167963129025e-05, |
|
"loss": 0.7096, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.614189485099373e-05, |
|
"loss": 0.6983, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.603762173885842e-05, |
|
"loss": 0.712, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.5933348626723114e-05, |
|
"loss": 0.6714, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.582907551458781e-05, |
|
"loss": 0.6959, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.5724802402452504e-05, |
|
"loss": 0.6654, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.5620529290317196e-05, |
|
"loss": 0.659, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.55162561781819e-05, |
|
"loss": 0.6296, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.541198306604659e-05, |
|
"loss": 0.6371, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.5307709953911285e-05, |
|
"loss": 0.6505, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.520343684177598e-05, |
|
"loss": 0.621, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.5099163729640675e-05, |
|
"loss": 0.6013, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.4994890617505366e-05, |
|
"loss": 0.6206, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.489061750537007e-05, |
|
"loss": 0.5886, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.478634439323476e-05, |
|
"loss": 0.5938, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.4682071281099455e-05, |
|
"loss": 0.5512, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.4577798168964154e-05, |
|
"loss": 0.5524, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.4473525056828845e-05, |
|
"loss": 0.5555, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.4369251944693544e-05, |
|
"loss": 0.5736, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.426497883255824e-05, |
|
"loss": 0.5437, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.4160705720422934e-05, |
|
"loss": 0.5492, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.4056432608287626e-05, |
|
"loss": 0.5497, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.3952159496152324e-05, |
|
"loss": 0.538, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.3847886384017016e-05, |
|
"loss": 0.5215, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.3743613271881714e-05, |
|
"loss": 0.4956, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.363934015974641e-05, |
|
"loss": 0.5114, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.3535067047611104e-05, |
|
"loss": 0.51, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.34307939354758e-05, |
|
"loss": 0.5159, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.3326520823340495e-05, |
|
"loss": 0.4825, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.3222247711205186e-05, |
|
"loss": 0.495, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.3117974599069885e-05, |
|
"loss": 0.4683, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.301370148693458e-05, |
|
"loss": 0.4663, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.2909428374799275e-05, |
|
"loss": 0.4545, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.280515526266397e-05, |
|
"loss": 0.4684, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.2700882150528665e-05, |
|
"loss": 0.4444, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.2596609038393363e-05, |
|
"loss": 0.4467, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.249233592625806e-05, |
|
"loss": 0.4538, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.2388062814122754e-05, |
|
"loss": 0.4192, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.2283789701987445e-05, |
|
"loss": 0.4444, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.2179516589852144e-05, |
|
"loss": 0.4396, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.2075243477716836e-05, |
|
"loss": 0.4456, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.1970970365581534e-05, |
|
"loss": 0.4337, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.186669725344623e-05, |
|
"loss": 0.4108, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.1762424141310924e-05, |
|
"loss": 0.4416, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.1658151029175616e-05, |
|
"loss": 0.4061, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.1553877917040314e-05, |
|
"loss": 0.3816, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.1449604804905006e-05, |
|
"loss": 0.3916, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.1345331692769704e-05, |
|
"loss": 0.3914, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.12410585806344e-05, |
|
"loss": 0.4327, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.1136785468499095e-05, |
|
"loss": 0.3784, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.1032512356363786e-05, |
|
"loss": 0.4018, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.0928239244228485e-05, |
|
"loss": 0.3416, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.0823966132093177e-05, |
|
"loss": 0.3602, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.0719693019957875e-05, |
|
"loss": 0.3911, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.0615419907822573e-05, |
|
"loss": 0.4008, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.0511146795687265e-05, |
|
"loss": 0.3659, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.040687368355196e-05, |
|
"loss": 0.3536, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.0302600571416655e-05, |
|
"loss": 0.3782, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.0198327459281354e-05, |
|
"loss": 0.3624, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.0094054347146045e-05, |
|
"loss": 0.3284, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9989781235010744e-05, |
|
"loss": 0.1845, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9885508122875436e-05, |
|
"loss": 0.2659, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9781235010740134e-05, |
|
"loss": 0.2861, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.9676961898604826e-05, |
|
"loss": 0.2826, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.9572688786469524e-05, |
|
"loss": 0.2703, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.9468415674334216e-05, |
|
"loss": 0.2455, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.9364142562198914e-05, |
|
"loss": 0.2555, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.9259869450063606e-05, |
|
"loss": 0.262, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.9155596337928305e-05, |
|
"loss": 0.2483, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.9051323225792996e-05, |
|
"loss": 0.2604, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.8947050113657695e-05, |
|
"loss": 0.2376, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.884277700152239e-05, |
|
"loss": 0.2499, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.8738503889387085e-05, |
|
"loss": 0.2632, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.863423077725178e-05, |
|
"loss": 0.2506, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.8529957665116475e-05, |
|
"loss": 0.2541, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.8425684552981174e-05, |
|
"loss": 0.2272, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.8321411440845865e-05, |
|
"loss": 0.2688, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.8217138328710564e-05, |
|
"loss": 0.235, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.8112865216575255e-05, |
|
"loss": 0.2608, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.800859210443995e-05, |
|
"loss": 0.2506, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.7904318992304646e-05, |
|
"loss": 0.2437, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.7800045880169344e-05, |
|
"loss": 0.2361, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.7695772768034036e-05, |
|
"loss": 0.2512, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.7591499655898734e-05, |
|
"loss": 0.2152, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.7487226543763426e-05, |
|
"loss": 0.2545, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.738295343162812e-05, |
|
"loss": 0.2286, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.7278680319492816e-05, |
|
"loss": 0.2297, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.7174407207357515e-05, |
|
"loss": 0.2478, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 3.7070134095222206e-05, |
|
"loss": 0.2344, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.6965860983086905e-05, |
|
"loss": 0.2134, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 3.6861587870951596e-05, |
|
"loss": 0.241, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 3.675731475881629e-05, |
|
"loss": 0.2374, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 3.6653041646680993e-05, |
|
"loss": 0.216, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 3.6548768534545685e-05, |
|
"loss": 0.2258, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 3.644449542241038e-05, |
|
"loss": 0.2325, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 3.6340222310275075e-05, |
|
"loss": 0.2286, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 3.623594919813977e-05, |
|
"loss": 0.2417, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 3.6131676086004465e-05, |
|
"loss": 0.2324, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 3.6027402973869164e-05, |
|
"loss": 0.2056, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 3.5923129861733856e-05, |
|
"loss": 0.231, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.581885674959855e-05, |
|
"loss": 0.2391, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.5714583637463246e-05, |
|
"loss": 0.2188, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.561031052532794e-05, |
|
"loss": 0.213, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.5506037413192636e-05, |
|
"loss": 0.2096, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.5401764301057334e-05, |
|
"loss": 0.24, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.5297491188922026e-05, |
|
"loss": 0.2246, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.5193218076786725e-05, |
|
"loss": 0.2104, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.5088944964651416e-05, |
|
"loss": 0.2397, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.498467185251611e-05, |
|
"loss": 0.2076, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 3.4880398740380806e-05, |
|
"loss": 0.1854, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 3.4776125628245505e-05, |
|
"loss": 0.2053, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 3.4671852516110197e-05, |
|
"loss": 0.1901, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 3.4567579403974895e-05, |
|
"loss": 0.2008, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 3.446330629183959e-05, |
|
"loss": 0.2115, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 3.435903317970428e-05, |
|
"loss": 0.2057, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 3.4254760067568984e-05, |
|
"loss": 0.1976, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.4150486955433675e-05, |
|
"loss": 0.1856, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 3.404621384329837e-05, |
|
"loss": 0.2248, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.3941940731163066e-05, |
|
"loss": 0.1992, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.383766761902776e-05, |
|
"loss": 0.2046, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.373339450689245e-05, |
|
"loss": 0.1814, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.3629121394757154e-05, |
|
"loss": 0.193, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.3524848282621846e-05, |
|
"loss": 0.1805, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.342057517048654e-05, |
|
"loss": 0.2065, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.3316302058351236e-05, |
|
"loss": 0.1871, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.321202894621593e-05, |
|
"loss": 0.191, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.3107755834080626e-05, |
|
"loss": 0.1948, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.3003482721945325e-05, |
|
"loss": 0.1803, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 3.2899209609810016e-05, |
|
"loss": 0.1796, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 3.279493649767471e-05, |
|
"loss": 0.1847, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 3.2690663385539407e-05, |
|
"loss": 0.1512, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 3.25863902734041e-05, |
|
"loss": 0.2074, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 3.24821171612688e-05, |
|
"loss": 0.1718, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 3.2377844049133495e-05, |
|
"loss": 0.1735, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 3.227357093699819e-05, |
|
"loss": 0.1928, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 3.216929782486288e-05, |
|
"loss": 0.1916, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 3.206502471272758e-05, |
|
"loss": 0.1708, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 3.196075160059227e-05, |
|
"loss": 0.1976, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 3.185647848845697e-05, |
|
"loss": 0.1646, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.1752205376321666e-05, |
|
"loss": 0.1641, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.164793226418636e-05, |
|
"loss": 0.1692, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 3.1543659152051056e-05, |
|
"loss": 0.1572, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 3.143938603991575e-05, |
|
"loss": 0.1892, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 3.1335112927780446e-05, |
|
"loss": 0.1725, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 3.123083981564514e-05, |
|
"loss": 0.1531, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.1126566703509836e-05, |
|
"loss": 0.1645, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.102229359137453e-05, |
|
"loss": 0.1838, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 3.0918020479239226e-05, |
|
"loss": 0.187, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 3.081374736710392e-05, |
|
"loss": 0.1606, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.0709474254968617e-05, |
|
"loss": 0.1669, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.0605201142833315e-05, |
|
"loss": 0.1454, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.0500928030698007e-05, |
|
"loss": 0.1528, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.0396654918562702e-05, |
|
"loss": 0.1522, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.0292381806427393e-05, |
|
"loss": 0.1398, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.018810869429209e-05, |
|
"loss": 0.1848, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.0083835582156787e-05, |
|
"loss": 0.1568, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 2.9979562470021482e-05, |
|
"loss": 0.0976, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 2.9875289357886177e-05, |
|
"loss": 0.0937, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 2.9771016245750872e-05, |
|
"loss": 0.1108, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 2.9666743133615564e-05, |
|
"loss": 0.1167, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.9562470021480266e-05, |
|
"loss": 0.0964, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.945819690934496e-05, |
|
"loss": 0.1095, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.9353923797209653e-05, |
|
"loss": 0.1088, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.9249650685074348e-05, |
|
"loss": 0.1049, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.9145377572939043e-05, |
|
"loss": 0.1072, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.9041104460803738e-05, |
|
"loss": 0.1173, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.8936831348668436e-05, |
|
"loss": 0.1058, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.883255823653313e-05, |
|
"loss": 0.1017, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.8728285124397823e-05, |
|
"loss": 0.1008, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.8624012012262518e-05, |
|
"loss": 0.1167, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.8519738900127213e-05, |
|
"loss": 0.0925, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.841546578799191e-05, |
|
"loss": 0.1107, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.8311192675856607e-05, |
|
"loss": 0.1131, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.8206919563721302e-05, |
|
"loss": 0.1059, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.8102646451585997e-05, |
|
"loss": 0.1073, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.799837333945069e-05, |
|
"loss": 0.112, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.7894100227315384e-05, |
|
"loss": 0.1114, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.7789827115180082e-05, |
|
"loss": 0.1333, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.7685554003044777e-05, |
|
"loss": 0.1007, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.7581280890909472e-05, |
|
"loss": 0.1127, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.7477007778774168e-05, |
|
"loss": 0.1023, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.737273466663886e-05, |
|
"loss": 0.0971, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.7268461554503554e-05, |
|
"loss": 0.1018, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.7164188442368256e-05, |
|
"loss": 0.1137, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.7059915330232948e-05, |
|
"loss": 0.106, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.6955642218097643e-05, |
|
"loss": 0.0998, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6851369105962338e-05, |
|
"loss": 0.0934, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.6747095993827033e-05, |
|
"loss": 0.1166, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.6642822881691725e-05, |
|
"loss": 0.1028, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.6538549769556427e-05, |
|
"loss": 0.1005, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.643427665742112e-05, |
|
"loss": 0.1105, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.6330003545285813e-05, |
|
"loss": 0.1114, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.622573043315051e-05, |
|
"loss": 0.0951, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.6121457321015204e-05, |
|
"loss": 0.1141, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.6017184208879895e-05, |
|
"loss": 0.0885, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.5912911096744597e-05, |
|
"loss": 0.112, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.5808637984609292e-05, |
|
"loss": 0.1167, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.5704364872473984e-05, |
|
"loss": 0.0975, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.560009176033868e-05, |
|
"loss": 0.0982, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.5495818648203374e-05, |
|
"loss": 0.1061, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 2.5391545536068073e-05, |
|
"loss": 0.1037, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 2.5287272423932768e-05, |
|
"loss": 0.0899, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 2.5182999311797463e-05, |
|
"loss": 0.0965, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 2.5078726199662154e-05, |
|
"loss": 0.1122, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 2.4974453087526853e-05, |
|
"loss": 0.1096, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 2.4870179975391548e-05, |
|
"loss": 0.0948, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 2.476590686325624e-05, |
|
"loss": 0.0827, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.4661633751120938e-05, |
|
"loss": 0.0865, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.4557360638985633e-05, |
|
"loss": 0.0936, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.445308752685033e-05, |
|
"loss": 0.1097, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.4348814414715023e-05, |
|
"loss": 0.1022, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.424454130257972e-05, |
|
"loss": 0.089, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.4140268190444414e-05, |
|
"loss": 0.0932, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.403599507830911e-05, |
|
"loss": 0.0871, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.3931721966173804e-05, |
|
"loss": 0.1136, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 2.38274488540385e-05, |
|
"loss": 0.0951, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 2.3723175741903194e-05, |
|
"loss": 0.098, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 2.361890262976789e-05, |
|
"loss": 0.0828, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 2.3514629517632584e-05, |
|
"loss": 0.1078, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 2.341035640549728e-05, |
|
"loss": 0.0896, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 2.3306083293361974e-05, |
|
"loss": 0.0901, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 2.3201810181226673e-05, |
|
"loss": 0.0887, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 2.3097537069091364e-05, |
|
"loss": 0.0915, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 2.299326395695606e-05, |
|
"loss": 0.0857, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 2.2888990844820758e-05, |
|
"loss": 0.1049, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 2.278471773268545e-05, |
|
"loss": 0.1008, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 2.2680444620550145e-05, |
|
"loss": 0.0904, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 2.2576171508414843e-05, |
|
"loss": 0.0968, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 2.2471898396279535e-05, |
|
"loss": 0.0824, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 2.236762528414423e-05, |
|
"loss": 0.0866, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 2.226335217200893e-05, |
|
"loss": 0.0836, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.2159079059873624e-05, |
|
"loss": 0.1039, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.2054805947738315e-05, |
|
"loss": 0.0776, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.1950532835603014e-05, |
|
"loss": 0.0774, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.184625972346771e-05, |
|
"loss": 0.0937, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.17419866113324e-05, |
|
"loss": 0.0965, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.16377134991971e-05, |
|
"loss": 0.0917, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.1533440387061794e-05, |
|
"loss": 0.0817, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.1429167274926486e-05, |
|
"loss": 0.0769, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1324894162791184e-05, |
|
"loss": 0.1112, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.122062105065588e-05, |
|
"loss": 0.0763, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 2.1116347938520574e-05, |
|
"loss": 0.0891, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.101207482638527e-05, |
|
"loss": 0.0885, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.0907801714249965e-05, |
|
"loss": 0.0709, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.080352860211466e-05, |
|
"loss": 0.0844, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.0699255489979355e-05, |
|
"loss": 0.0868, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.059498237784405e-05, |
|
"loss": 0.0867, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 2.0490709265708745e-05, |
|
"loss": 0.0902, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 2.038643615357344e-05, |
|
"loss": 0.0807, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 2.0282163041438135e-05, |
|
"loss": 0.0819, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.017788992930283e-05, |
|
"loss": 0.0979, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.0073616817167525e-05, |
|
"loss": 0.0796, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 4.9969343705032224e-05, |
|
"loss": 0.0773, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 4.9865070592896916e-05, |
|
"loss": 0.0796, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 4.9760797480761615e-05, |
|
"loss": 0.0831, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.9656524368626306e-05, |
|
"loss": 0.0935, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 4.9552251256491005e-05, |
|
"loss": 0.0973, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.94479781443557e-05, |
|
"loss": 0.1146, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 4.9343705032220395e-05, |
|
"loss": 0.1104, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.923943192008509e-05, |
|
"loss": 0.109, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 4.9135158807949785e-05, |
|
"loss": 0.1118, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.903088569581448e-05, |
|
"loss": 0.1169, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.8926612583679175e-05, |
|
"loss": 0.1463, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.8822339471543874e-05, |
|
"loss": 0.1227, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.8718066359408565e-05, |
|
"loss": 0.1233, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.861379324727326e-05, |
|
"loss": 0.1258, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.8509520135137956e-05, |
|
"loss": 0.1439, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.8405247023002654e-05, |
|
"loss": 0.1225, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.8300973910867346e-05, |
|
"loss": 0.1204, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.8196700798732044e-05, |
|
"loss": 0.1256, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.8092427686596736e-05, |
|
"loss": 0.1498, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.798815457446143e-05, |
|
"loss": 0.1416, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.7883881462326126e-05, |
|
"loss": 0.1263, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 4.7779608350190825e-05, |
|
"loss": 0.1285, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 4.7675335238055516e-05, |
|
"loss": 0.1294, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 4.7571062125920215e-05, |
|
"loss": 0.1491, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 4.7466789013784906e-05, |
|
"loss": 0.1495, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 4.73625159016496e-05, |
|
"loss": 0.1414, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 4.7258242789514297e-05, |
|
"loss": 0.1384, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 4.7153969677378995e-05, |
|
"loss": 0.1376, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 4.704969656524369e-05, |
|
"loss": 0.1456, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 4.6945423453108385e-05, |
|
"loss": 0.1326, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 4.684115034097308e-05, |
|
"loss": 0.1676, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 4.6736877228837775e-05, |
|
"loss": 0.1485, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 4.6632604116702474e-05, |
|
"loss": 0.1319, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 4.6528331004567166e-05, |
|
"loss": 0.1349, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 4.642405789243186e-05, |
|
"loss": 0.1386, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 4.6319784780296556e-05, |
|
"loss": 0.1562, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 4.621551166816125e-05, |
|
"loss": 0.1441, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.6111238556025946e-05, |
|
"loss": 0.1413, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.6006965443890644e-05, |
|
"loss": 0.1614, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.5902692331755336e-05, |
|
"loss": 0.1516, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.5798419219620035e-05, |
|
"loss": 0.1448, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.5694146107484726e-05, |
|
"loss": 0.1514, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.558987299534942e-05, |
|
"loss": 0.1558, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.5485599883214116e-05, |
|
"loss": 0.1487, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.5381326771078815e-05, |
|
"loss": 0.1487, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 4.5277053658943507e-05, |
|
"loss": 0.1447, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 4.5172780546808205e-05, |
|
"loss": 0.1729, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.50685074346729e-05, |
|
"loss": 0.1425, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 4.496423432253759e-05, |
|
"loss": 0.1562, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.4859961210402294e-05, |
|
"loss": 0.1481, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.4755688098266985e-05, |
|
"loss": 0.1466, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.465141498613168e-05, |
|
"loss": 0.133, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.4547141873996376e-05, |
|
"loss": 0.1485, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.444286876186107e-05, |
|
"loss": 0.1332, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.433859564972576e-05, |
|
"loss": 0.1463, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 4.4234322537590464e-05, |
|
"loss": 0.1279, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 4.4130049425455156e-05, |
|
"loss": 0.1312, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.402577631331985e-05, |
|
"loss": 0.1387, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 4.3921503201184546e-05, |
|
"loss": 0.1305, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.381723008904924e-05, |
|
"loss": 0.1403, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.371295697691393e-05, |
|
"loss": 0.1462, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.3608683864778635e-05, |
|
"loss": 0.1404, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.3504410752643326e-05, |
|
"loss": 0.1384, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.340013764050802e-05, |
|
"loss": 0.1396, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 4.3295864528372717e-05, |
|
"loss": 0.1235, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 4.319159141623741e-05, |
|
"loss": 0.1337, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 4.308731830410211e-05, |
|
"loss": 0.1382, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 4.2983045191966805e-05, |
|
"loss": 0.1376, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 4.28787720798315e-05, |
|
"loss": 0.1458, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 4.277449896769619e-05, |
|
"loss": 0.1541, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 4.267022585556089e-05, |
|
"loss": 0.1327, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 4.256595274342558e-05, |
|
"loss": 0.1395, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 4.246167963129028e-05, |
|
"loss": 0.1272, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 4.2357406519154976e-05, |
|
"loss": 0.1276, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 4.225313340701967e-05, |
|
"loss": 0.1519, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 4.2148860294884366e-05, |
|
"loss": 0.1236, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 4.204458718274906e-05, |
|
"loss": 0.1173, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 4.194031407061375e-05, |
|
"loss": 0.144, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 4.183604095847845e-05, |
|
"loss": 0.1489, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 4.1731767846343146e-05, |
|
"loss": 0.1405, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 4.162749473420784e-05, |
|
"loss": 0.12, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 4.1523221622072536e-05, |
|
"loss": 0.1278, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 4.141894850993723e-05, |
|
"loss": 0.1298, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 4.1314675397801927e-05, |
|
"loss": 0.1269, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 4.1210402285666625e-05, |
|
"loss": 0.1278, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 4.110612917353132e-05, |
|
"loss": 0.129, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.100185606139601e-05, |
|
"loss": 0.1369, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.089758294926071e-05, |
|
"loss": 0.1337, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 4.07933098371254e-05, |
|
"loss": 0.1352, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 4.06890367249901e-05, |
|
"loss": 0.1133, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 4.0584763612854795e-05, |
|
"loss": 0.1498, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 4.048049050071949e-05, |
|
"loss": 0.1344, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 4.037621738858418e-05, |
|
"loss": 0.1467, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 4.027194427644888e-05, |
|
"loss": 0.1235, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 4.016767116431357e-05, |
|
"loss": 0.1333, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 4.006339805217827e-05, |
|
"loss": 0.1198, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 4.9959124940042965e-05, |
|
"loss": 0.0767, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 4.985485182790766e-05, |
|
"loss": 0.0963, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 4.9750578715772355e-05, |
|
"loss": 0.1067, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 4.964630560363705e-05, |
|
"loss": 0.1075, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 4.9542032491501745e-05, |
|
"loss": 0.1101, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 4.943775937936644e-05, |
|
"loss": 0.1055, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 4.9333486267231136e-05, |
|
"loss": 0.103, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 4.922921315509583e-05, |
|
"loss": 0.114, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 4.9124940042960526e-05, |
|
"loss": 0.1182, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 4.9020666930825224e-05, |
|
"loss": 0.1224, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 4.8916393818689916e-05, |
|
"loss": 0.1168, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 4.881212070655461e-05, |
|
"loss": 0.1395, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 4.8707847594419306e-05, |
|
"loss": 0.1203, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 4.8603574482284005e-05, |
|
"loss": 0.1176, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 4.8499301370148696e-05, |
|
"loss": 0.1254, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 4.8395028258013395e-05, |
|
"loss": 0.1148, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 4.8290755145878086e-05, |
|
"loss": 0.1202, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 4.818648203374278e-05, |
|
"loss": 0.1228, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 4.808220892160748e-05, |
|
"loss": 0.1249, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 4.7977935809472175e-05, |
|
"loss": 0.1413, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 4.787366269733687e-05, |
|
"loss": 0.151, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 4.7769389585201565e-05, |
|
"loss": 0.1217, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 4.766511647306626e-05, |
|
"loss": 0.1251, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 4.756084336093095e-05, |
|
"loss": 0.1299, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 4.745657024879565e-05, |
|
"loss": 0.1337, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 4.7352297136660346e-05, |
|
"loss": 0.1249, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 4.724802402452504e-05, |
|
"loss": 0.1273, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 4.7143750912389736e-05, |
|
"loss": 0.123, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 4.703947780025443e-05, |
|
"loss": 0.1142, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 4.693520468811912e-05, |
|
"loss": 0.1485, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 4.683093157598382e-05, |
|
"loss": 0.1247, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 4.6726658463848516e-05, |
|
"loss": 0.1238, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 4.662238535171321e-05, |
|
"loss": 0.1242, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 4.6518112239577906e-05, |
|
"loss": 0.1253, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 4.64138391274426e-05, |
|
"loss": 0.1325, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 4.630956601530729e-05, |
|
"loss": 0.1258, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 4.6205292903171995e-05, |
|
"loss": 0.1184, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 4.6101019791036687e-05, |
|
"loss": 0.1195, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 4.599674667890138e-05, |
|
"loss": 0.1382, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 4.589247356676608e-05, |
|
"loss": 0.138, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 4.578820045463077e-05, |
|
"loss": 0.1297, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 4.568392734249547e-05, |
|
"loss": 0.1161, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 4.5579654230360165e-05, |
|
"loss": 0.1273, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 4.547538111822486e-05, |
|
"loss": 0.1513, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 4.537110800608955e-05, |
|
"loss": 0.1043, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 4.526683489395425e-05, |
|
"loss": 0.1333, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 4.516256178181894e-05, |
|
"loss": 0.1228, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 4.505828866968364e-05, |
|
"loss": 0.1255, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 4.4954015557548336e-05, |
|
"loss": 0.1047, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 4.484974244541303e-05, |
|
"loss": 0.1412, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.4745469333277726e-05, |
|
"loss": 0.1399, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 4.464119622114242e-05, |
|
"loss": 0.12, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 4.453692310900711e-05, |
|
"loss": 0.1273, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 4.443264999687181e-05, |
|
"loss": 0.1154, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 4.4328376884736506e-05, |
|
"loss": 0.1364, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 4.42241037726012e-05, |
|
"loss": 0.1231, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 4.4119830660465897e-05, |
|
"loss": 0.1127, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 4.401555754833059e-05, |
|
"loss": 0.1183, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 4.391128443619528e-05, |
|
"loss": 0.1202, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 4.3807011324059985e-05, |
|
"loss": 0.1313, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 4.370273821192468e-05, |
|
"loss": 0.1213, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 4.359846509978937e-05, |
|
"loss": 0.1199, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 4.349419198765407e-05, |
|
"loss": 0.1051, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 4.338991887551876e-05, |
|
"loss": 0.1251, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 4.328564576338345e-05, |
|
"loss": 0.1292, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 4.3181372651248156e-05, |
|
"loss": 0.1228, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 4.307709953911285e-05, |
|
"loss": 0.1158, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 4.297282642697754e-05, |
|
"loss": 0.1084, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 4.286855331484224e-05, |
|
"loss": 0.1183, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 4.276428020270693e-05, |
|
"loss": 0.1103, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 4.266000709057163e-05, |
|
"loss": 0.1037, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 4.2555733978436326e-05, |
|
"loss": 0.1093, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 4.245146086630102e-05, |
|
"loss": 0.1005, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 4.234718775416571e-05, |
|
"loss": 0.1307, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 4.224291464203041e-05, |
|
"loss": 0.125, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 4.21386415298951e-05, |
|
"loss": 0.113, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 4.20343684177598e-05, |
|
"loss": 0.1093, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 4.19300953056245e-05, |
|
"loss": 0.1135, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 4.182582219348919e-05, |
|
"loss": 0.1086, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 4.172154908135388e-05, |
|
"loss": 0.1282, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 4.161727596921858e-05, |
|
"loss": 0.0988, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 4.151300285708327e-05, |
|
"loss": 0.1118, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 4.140872974494797e-05, |
|
"loss": 0.1233, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 4.130445663281267e-05, |
|
"loss": 0.1327, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 4.120018352067736e-05, |
|
"loss": 0.1061, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 4.109591040854206e-05, |
|
"loss": 0.1085, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 4.099163729640675e-05, |
|
"loss": 0.0944, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 4.088736418427145e-05, |
|
"loss": 0.1009, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 4.078309107213614e-05, |
|
"loss": 0.1104, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 4.067881796000084e-05, |
|
"loss": 0.1201, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 4.057454484786553e-05, |
|
"loss": 0.1067, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 4.047027173573023e-05, |
|
"loss": 0.117, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 4.036599862359492e-05, |
|
"loss": 0.1221, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 4.026172551145962e-05, |
|
"loss": 0.1082, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 4.0157452399324317e-05, |
|
"loss": 0.1087, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 4.005317928718901e-05, |
|
"loss": 0.1024, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"step": 239755, |
|
"total_flos": 3.624187009725235e+16, |
|
"train_loss": 0.02392078927181249, |
|
"train_runtime": 8161.1668, |
|
"train_samples_per_second": 117.509, |
|
"train_steps_per_second": 29.378 |
|
} |
|
], |
|
"max_steps": 239755, |
|
"num_train_epochs": 5, |
|
"total_flos": 3.624187009725235e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |