{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.1828941378787112,
  "global_step": 1461,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 6e-06,
      "loss": 1.9846,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.2e-05,
      "loss": 1.8345,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.8e-05,
      "loss": 1.7106,
      "step": 15
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.4e-05,
      "loss": 1.6318,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 3e-05,
      "loss": 1.5838,
      "step": 25
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.9999690442167746e-05,
      "loss": 1.6664,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.999876178144779e-05,
      "loss": 1.5785,
      "step": 35
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.9997214056170024e-05,
      "loss": 1.6536,
      "step": 40
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.9995047330215847e-05,
      "loss": 1.5866,
      "step": 45
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.9992261693015524e-05,
      "loss": 1.6324,
      "step": 50
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.9988857259544498e-05,
      "loss": 1.6263,
      "step": 55
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.9984834170318635e-05,
      "loss": 1.6481,
      "step": 60
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.9980192591388452e-05,
      "loss": 1.6703,
      "step": 65
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.997493271433222e-05,
      "loss": 1.6566,
      "step": 70
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.9969054756248093e-05,
      "loss": 1.6168,
      "step": 75
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.9962558959745133e-05,
      "loss": 1.6836,
      "step": 80
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.9955445592933296e-05,
      "loss": 1.7089,
      "step": 85
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.994771494941238e-05,
      "loss": 1.6573,
      "step": 90
    },
    {
      "epoch": 0.08,
      "learning_rate": 2.9939367348259873e-05,
      "loss": 1.5558,
      "step": 95
    },
    {
      "epoch": 0.08,
      "learning_rate": 2.9930403134017823e-05,
      "loss": 1.6069,
      "step": 100
    },
    {
      "epoch": 0.08,
      "learning_rate": 2.992082267667859e-05,
      "loss": 1.6357,
      "step": 105
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.9910626371669593e-05,
      "loss": 1.5968,
      "step": 110
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.9899814639836972e-05,
      "loss": 1.6244,
      "step": 115
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.9888387927428234e-05,
      "loss": 1.5168,
      "step": 120
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.987634670607381e-05,
      "loss": 1.6162,
      "step": 125
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.9863691472767633e-05,
      "loss": 1.6014,
      "step": 130
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.9850422749846577e-05,
      "loss": 1.6596,
      "step": 135
    },
    {
      "epoch": 0.11,
      "learning_rate": 2.9836541084968914e-05,
      "loss": 1.6175,
      "step": 140
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.9822047051091735e-05,
      "loss": 1.6219,
      "step": 145
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.9806941246447258e-05,
      "loss": 1.5605,
      "step": 150
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.9791224294518173e-05,
      "loss": 1.6062,
      "step": 155
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.9774896844011887e-05,
      "loss": 1.5561,
      "step": 160
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.9757959568833758e-05,
      "loss": 1.5929,
      "step": 165
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.9740413168059278e-05,
      "loss": 1.6087,
      "step": 170
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.9722258365905223e-05,
      "loss": 1.603,
      "step": 175
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.9703495911699746e-05,
      "loss": 1.5866,
      "step": 180
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.9684126579851468e-05,
      "loss": 1.696,
      "step": 185
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.9664151169817515e-05,
      "loss": 1.59,
      "step": 190
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.9643570506070493e-05,
      "loss": 1.5724,
      "step": 195
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.9622385438064493e-05,
      "loss": 1.5482,
      "step": 200
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.9600596840200022e-05,
      "loss": 1.6198,
      "step": 205
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.9578205611787877e-05,
      "loss": 1.6246,
      "step": 210
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.955521267701209e-05,
      "loss": 1.6953,
      "step": 215
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.9531618984891737e-05,
      "loss": 1.6397,
      "step": 220
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.9507425509241757e-05,
      "loss": 1.5797,
      "step": 225
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.948263324863282e-05,
      "loss": 1.63,
      "step": 230
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.945724322635004e-05,
      "loss": 1.5258,
      "step": 235
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.9431256490350795e-05,
      "loss": 1.6107,
      "step": 240
    },
    {
      "epoch": 0.2,
      "learning_rate": 2.9404674113221433e-05,
      "loss": 1.5577,
      "step": 245
    },
    {
      "epoch": 0.2,
      "learning_rate": 2.937749719213303e-05,
      "loss": 1.6001,
      "step": 250
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.9349726848796083e-05,
      "loss": 1.6326,
      "step": 255
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.932136422941424e-05,
      "loss": 1.5575,
      "step": 260
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.929241050463696e-05,
      "loss": 1.692,
      "step": 265
    },
    {
      "epoch": 0.22,
      "learning_rate": 2.926286686951121e-05,
      "loss": 1.5985,
      "step": 270
    },
    {
      "epoch": 0.22,
      "learning_rate": 2.9232734543432146e-05,
      "loss": 1.5635,
      "step": 275
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.920201477009277e-05,
      "loss": 1.6745,
      "step": 280
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.9170708817432612e-05,
      "loss": 1.5613,
      "step": 285
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.9138817977585383e-05,
      "loss": 1.6063,
      "step": 290
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.910634356682565e-05,
      "loss": 1.6118,
      "step": 295
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.9073286925514504e-05,
      "loss": 1.5912,
      "step": 300
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.9039649418044247e-05,
      "loss": 1.596,
      "step": 305
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.900543243278206e-05,
      "loss": 1.5501,
      "step": 310
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.8970637382012714e-05,
      "loss": 1.6077,
      "step": 315
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.8935265701880277e-05,
      "loss": 1.5393,
      "step": 320
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.8899318852328833e-05,
      "loss": 1.5622,
      "step": 325
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.8862798317042222e-05,
      "loss": 1.5683,
      "step": 330
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.882570560338281e-05,
      "loss": 1.5644,
      "step": 335
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.878804224232927e-05,
      "loss": 1.6027,
      "step": 340
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.8749809788413383e-05,
      "loss": 1.5418,
      "step": 345
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.87110098196559e-05,
      "loss": 1.5934,
      "step": 350
    },
    {
      "epoch": 0.29,
      "learning_rate": 2.8671643937501375e-05,
      "loss": 1.5265,
      "step": 355
    },
    {
      "epoch": 0.29,
      "learning_rate": 2.8631713766752097e-05,
      "loss": 1.6044,
      "step": 360
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.859122095550101e-05,
      "loss": 1.6244,
      "step": 365
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.8550167175063705e-05,
      "loss": 1.5721,
      "step": 370
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.850855411990941e-05,
      "loss": 1.5572,
      "step": 375
    },
    {
      "epoch": 0.31,
      "learning_rate": 2.8466383507591083e-05,
      "loss": 1.5034,
      "step": 380
    },
    {
      "epoch": 0.31,
      "learning_rate": 2.84236570786745e-05,
      "loss": 1.6026,
      "step": 385
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.8380376596666425e-05,
      "loss": 1.6428,
      "step": 390
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.833654384794181e-05,
      "loss": 1.5364,
      "step": 395
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.8292160641670088e-05,
      "loss": 1.5064,
      "step": 400
    },
    {
      "epoch": 0.33,
      "learning_rate": 2.8247228809740468e-05,
      "loss": 1.6338,
      "step": 405
    },
    {
      "epoch": 0.33,
      "learning_rate": 2.820175020668635e-05,
      "loss": 1.5189,
      "step": 410
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.8155726709608777e-05,
      "loss": 1.5359,
      "step": 415
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.810916021809894e-05,
      "loss": 1.6113,
      "step": 420
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.8062052654159797e-05,
      "loss": 1.5724,
      "step": 425
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.8014405962126735e-05,
      "loss": 1.5174,
      "step": 430
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.7966222108587307e-05,
      "loss": 1.5741,
      "step": 435
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.7917503082300086e-05,
      "loss": 1.595,
      "step": 440
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.7868250894112555e-05,
      "loss": 1.6298,
      "step": 445
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.7818467576878136e-05,
      "loss": 1.5871,
      "step": 450
    },
    {
      "epoch": 0.37,
      "learning_rate": 2.776815518537226e-05,
      "loss": 1.5841,
      "step": 455
    },
    {
      "epoch": 0.37,
      "learning_rate": 2.7717315796207576e-05,
      "loss": 1.5128,
      "step": 460
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.7665951507748223e-05,
      "loss": 1.5055,
      "step": 465
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.7614064440023254e-05,
      "loss": 1.6022,
      "step": 470
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.7561656734639085e-05,
      "loss": 1.5673,
      "step": 475
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.7508730554691145e-05,
      "loss": 1.5504,
      "step": 480
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.7455288084674565e-05,
      "loss": 1.6085,
      "step": 485
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.7401331530394037e-05,
      "loss": 1.5598,
      "step": 490
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.7346863118872766e-05,
      "loss": 1.5559,
      "step": 495
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.729188509826053e-05,
      "loss": 1.5366,
      "step": 500
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.7236399737740912e-05,
      "loss": 1.5431,
      "step": 505
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.7180409327437648e-05,
      "loss": 1.58,
      "step": 510
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.712391617832006e-05,
      "loss": 1.5656,
      "step": 515
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.7066922622107726e-05,
      "loss": 1.5488,
      "step": 520
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.700943101117421e-05,
      "loss": 1.563,
      "step": 525
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.6951443718449966e-05,
      "loss": 1.5845,
      "step": 530
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.689296313732442e-05,
      "loss": 1.5782,
      "step": 535
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.6833991681547158e-05,
      "loss": 1.6135,
      "step": 540
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.677453178512831e-05,
      "loss": 1.5745,
      "step": 545
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.6714585902238105e-05,
      "loss": 1.6138,
      "step": 550
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.6654156507105543e-05,
      "loss": 1.5663,
      "step": 555
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.6593246093916307e-05,
      "loss": 1.5842,
      "step": 560
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.653185717670978e-05,
      "loss": 1.5424,
      "step": 565
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.6469992289275325e-05,
      "loss": 1.5542,
      "step": 570
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.6407653985047665e-05,
      "loss": 1.5571,
      "step": 575
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.6344844837001508e-05,
      "loss": 1.5838,
      "step": 580
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.6281567437545347e-05,
      "loss": 1.5626,
      "step": 585
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.6217824398414473e-05,
      "loss": 1.5316,
      "step": 590
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.615361835056314e-05,
      "loss": 1.6038,
      "step": 595
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.6088951944056024e-05,
      "loss": 1.6042,
      "step": 600
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.6023827847958802e-05,
      "loss": 1.5683,
      "step": 605
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.5958248750228018e-05,
      "loss": 1.5166,
      "step": 610
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.5892217357600104e-05,
      "loss": 1.5522,
      "step": 615
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.5825736395479708e-05,
      "loss": 1.6122,
      "step": 620
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.5758808607827153e-05,
      "loss": 1.5722,
      "step": 625
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.5691436757045232e-05,
      "loss": 1.5333,
      "step": 630
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.5623623623865152e-05,
      "loss": 1.563,
      "step": 635
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.5555372007231777e-05,
      "loss": 1.5318,
      "step": 640
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.548668472418811e-05,
      "loss": 1.548,
      "step": 645
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.5417564609759005e-05,
      "loss": 1.5749,
      "step": 650
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.5348014516834175e-05,
      "loss": 1.6166,
      "step": 655
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.5278037316050417e-05,
      "loss": 1.61,
      "step": 660
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.5207635895673138e-05,
      "loss": 1.4852,
      "step": 665
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.513681316147715e-05,
      "loss": 1.5476,
      "step": 670
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.506557203662673e-05,
      "loss": 1.6291,
      "step": 675
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.4993915461554974e-05,
      "loss": 1.5232,
      "step": 680
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.4921846393842414e-05,
      "loss": 1.5827,
      "step": 685
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.484936780809497e-05,
      "loss": 1.5019,
      "step": 690
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.4776482695821154e-05,
      "loss": 1.5315,
      "step": 695
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.470319406530862e-05,
      "loss": 1.5725,
      "step": 700
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.4629504941499984e-05,
      "loss": 1.6135,
      "step": 705
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.4555418365867965e-05,
      "loss": 1.556,
      "step": 710
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.4480937396289856e-05,
      "loss": 1.5323,
      "step": 715
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.4406065106921332e-05,
      "loss": 1.5726,
      "step": 720
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.4330804588069536e-05,
      "loss": 1.6077,
      "step": 725
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.4255158946065542e-05,
      "loss": 1.5959,
      "step": 730
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.4179131303136146e-05,
      "loss": 1.5373,
      "step": 735
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.4102724797274994e-05,
      "loss": 1.5624,
      "step": 740
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.4025942582113067e-05,
      "loss": 1.5431,
      "step": 745
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.3948787826788495e-05,
      "loss": 1.5807,
      "step": 750
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.3871263715815802e-05,
      "loss": 1.5659,
      "step": 755
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.3793373448954406e-05,
      "loss": 1.5356,
      "step": 760
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.3715120241076602e-05,
      "loss": 1.5181,
      "step": 765
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.3636507322034844e-05,
      "loss": 1.6218,
      "step": 770
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.355753793652844e-05,
      "loss": 1.6171,
      "step": 775
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.3478215343969623e-05,
      "loss": 1.5306,
      "step": 780
    },
    {
      "epoch": 0.64,
      "learning_rate": 2.3398542818349042e-05,
      "loss": 1.5147,
      "step": 785
    },
    {
      "epoch": 0.64,
      "learning_rate": 2.33185236481006e-05,
      "loss": 1.5031,
      "step": 790
    },
    {
      "epoch": 0.64,
      "learning_rate": 2.323816113596575e-05,
      "loss": 1.5471,
      "step": 795
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.3157458598857164e-05,
      "loss": 1.5904,
      "step": 800
    },
    {
      "epoch": 0.65,
      "learning_rate": 2.3076419367721834e-05,
      "loss": 1.6055,
      "step": 805
    },
    {
      "epoch": 0.66,
      "learning_rate": 2.299504678740359e-05,
      "loss": 1.6248,
      "step": 810
    },
    {
      "epoch": 0.66,
      "learning_rate": 2.2913344216505043e-05,
      "loss": 1.5905,
      "step": 815
    },
    {
      "epoch": 0.66,
      "learning_rate": 2.283131502724896e-05,
      "loss": 1.4956,
      "step": 820
    },
    {
      "epoch": 0.67,
      "learning_rate": 2.2748962605339066e-05,
      "loss": 1.537,
      "step": 825
    },
    {
      "epoch": 0.67,
      "learning_rate": 2.266629034982033e-05,
      "loss": 1.5609,
      "step": 830
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.2583301672938648e-05,
      "loss": 1.6342,
      "step": 835
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.25e-05,
      "loss": 1.6218,
      "step": 840
    },
    {
      "epoch": 0.68,
      "learning_rate": 2.24163887692291e-05,
      "loss": 1.6035,
      "step": 845
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.233247143162746e-05,
      "loss": 1.4957,
      "step": 850
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.224825145083096e-05,
      "loss": 1.4736,
      "step": 855
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.216373230296689e-05,
      "loss": 1.581,
      "step": 860
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.2078917476510483e-05,
      "loss": 1.6488,
      "step": 865
    },
    {
      "epoch": 0.7,
      "learning_rate": 2.1993810472140908e-05,
      "loss": 1.5308,
      "step": 870
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.190841480259681e-05,
      "loss": 1.4948,
      "step": 875
    },
    {
      "epoch": 0.71,
      "learning_rate": 2.1822733992531294e-05,
      "loss": 1.5796,
      "step": 880
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.1736771578366472e-05,
      "loss": 1.6163,
      "step": 885
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.1650531108147493e-05,
      "loss": 1.5086,
      "step": 890
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.1564016141396093e-05,
      "loss": 1.564,
      "step": 895
    },
    {
      "epoch": 0.73,
      "learning_rate": 2.1477230248963675e-05,
      "loss": 1.6207,
      "step": 900
    },
    {
      "epoch": 0.73,
      "learning_rate": 2.139017701288394e-05,
      "loss": 1.5649,
      "step": 905
    },
    {
      "epoch": 0.74,
      "learning_rate": 2.1302860026225027e-05,
      "loss": 1.5835,
      "step": 910
    },
    {
      "epoch": 0.74,
      "learning_rate": 2.121528289294122e-05,
      "loss": 1.5632,
      "step": 915
    },
    {
      "epoch": 0.74,
      "learning_rate": 2.1127449227724186e-05,
      "loss": 1.5381,
      "step": 920
    },
    {
      "epoch": 0.75,
      "learning_rate": 2.1039362655853796e-05,
      "loss": 1.5546,
      "step": 925
    },
    {
      "epoch": 0.75,
      "learning_rate": 2.0951026813048475e-05,
      "loss": 1.4658,
      "step": 930
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.0862445345315165e-05,
      "loss": 1.5073,
      "step": 935
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.0773621908798818e-05,
      "loss": 1.5564,
      "step": 940
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.068456016963149e-05,
      "loss": 1.6254,
      "step": 945
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.0595263803781037e-05,
      "loss": 1.5497,
      "step": 950
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.050573649689938e-05,
      "loss": 1.5791,
      "step": 955
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.0415981944170405e-05,
      "loss": 1.4777,
      "step": 960
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.0326003850157408e-05,
      "loss": 1.5316,
      "step": 965
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.0235805928650214e-05,
      "loss": 1.5288,
      "step": 970
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.0145391902511905e-05,
      "loss": 1.5519,
      "step": 975
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.0054765503525136e-05,
      "loss": 1.5359,
      "step": 980
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.9963930472238126e-05,
      "loss": 1.5367,
      "step": 985
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.9872890557810258e-05,
      "loss": 1.5804,
      "step": 990
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.978164951785733e-05,
      "loss": 1.6531,
      "step": 995
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.96902111182965e-05,
      "loss": 1.6579,
      "step": 1000
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.959857913319078e-05,
      "loss": 1.6308,
      "step": 1005
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.9506757344593345e-05,
      "loss": 1.6248,
      "step": 1010
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.941474954239136e-05,
      "loss": 1.5152,
      "step": 1015
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.9322559524149603e-05,
      "loss": 1.5966,
      "step": 1020
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.92301910949537e-05,
      "loss": 1.5624,
      "step": 1025
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.9137648067253087e-05,
      "loss": 1.6706,
      "step": 1030
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.9044934260703623e-05,
      "loss": 1.566,
      "step": 1035
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.895205350200998e-05,
      "loss": 1.508,
      "step": 1040
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.885900962476767e-05,
      "loss": 1.6144,
      "step": 1045
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.8765806469304814e-05,
      "loss": 1.6291,
      "step": 1050
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.8672447882523644e-05,
      "loss": 1.6088,
      "step": 1055
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.8578937717741727e-05,
      "loss": 1.5956,
      "step": 1060
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.8485279834532923e-05,
      "loss": 1.6554,
      "step": 1065
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.839147809856807e-05,
      "loss": 1.6333,
      "step": 1070
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.8297536381455434e-05,
      "loss": 1.6016,
      "step": 1075
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.8203458560580934e-05,
      "loss": 1.5765,
      "step": 1080
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.810924851894807e-05,
      "loss": 1.4411,
      "step": 1085
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.801491014501768e-05,
      "loss": 1.6064,
      "step": 1090
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.7920447332547423e-05,
      "loss": 1.5165,
      "step": 1095
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.7825863980431106e-05,
      "loss": 1.628,
      "step": 1100
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.773116399253772e-05,
      "loss": 1.5792,
      "step": 1105
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.7636351277550324e-05,
      "loss": 1.5908,
      "step": 1110
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.7541429748804722e-05,
      "loss": 1.5477,
      "step": 1115
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.744640332412794e-05,
      "loss": 1.6118,
      "step": 1120
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.7351275925676517e-05,
      "loss": 1.6034,
      "step": 1125
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.725605147977461e-05,
      "loss": 1.6053,
      "step": 1130
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.716073391675197e-05,
      "loss": 1.5638,
      "step": 1135
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.7065327170781678e-05,
      "loss": 1.5809,
      "step": 1140
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.69698351797178e-05,
      "loss": 1.5822,
      "step": 1145
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6874261884932842e-05,
      "loss": 1.5981,
      "step": 1150
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.677861123115506e-05,
      "loss": 1.5995,
      "step": 1155
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.668288716630566e-05,
      "loss": 1.5323,
      "step": 1160
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6587093641335844e-05,
      "loss": 1.5981,
      "step": 1165
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6491234610063732e-05,
      "loss": 1.6448,
      "step": 1170
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6395314029011184e-05,
      "loss": 1.63,
      "step": 1175
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6299335857240484e-05,
      "loss": 1.6529,
      "step": 1180
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6203304056190938e-05,
      "loss": 1.6172,
      "step": 1185
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6107222589515376e-05,
      "loss": 1.5247,
      "step": 1190
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.601109542291652e-05,
      "loss": 1.5932,
      "step": 1195
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.591492652398336e-05,
      "loss": 1.6186,
      "step": 1200
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5818719862027338e-05,
      "loss": 1.6524,
      "step": 1205
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.572247940791856e-05,
      "loss": 1.611,
      "step": 1210
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5626209133921863e-05,
      "loss": 1.5913,
      "step": 1215
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5529913013532894e-05,
      "loss": 1.7348,
      "step": 1220
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.543359502131409e-05,
      "loss": 1.719,
      "step": 1225
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.533725913273064e-05,
      "loss": 1.6274,
      "step": 1230
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5240909323986384e-05,
      "loss": 1.5266,
      "step": 1235
    },
    {
      "epoch": 1.0,
      "eval_loss": 3.4220237731933594,
      "eval_runtime": 954.9678,
      "eval_samples_per_second": 9.114,
      "eval_steps_per_second": 2.279,
      "step": 1235
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5144549571859711e-05,
      "loss": 1.3304,
      "step": 1240
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5048183853539425e-05,
      "loss": 1.0536,
      "step": 1245
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.4951816146460574e-05,
      "loss": 1.0564,
      "step": 1250
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.4855450428140291e-05,
      "loss": 1.0105,
      "step": 1255
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.4759090676013616e-05,
      "loss": 1.0252,
      "step": 1260
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.4662740867269361e-05,
      "loss": 1.0029,
      "step": 1265
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.4566404978685912e-05,
      "loss": 0.9685,
      "step": 1270
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.447008698646711e-05,
      "loss": 1.0095,
      "step": 1275
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.4373790866078143e-05,
      "loss": 0.9759,
      "step": 1280
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.4277520592081442e-05,
      "loss": 1.0217,
      "step": 1285
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.4181280137972666e-05,
      "loss": 0.9472,
      "step": 1290
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.4085073476016642e-05,
      "loss": 1.0432,
      "step": 1295
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.3988904577083481e-05,
      "loss": 1.0355,
      "step": 1300
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.389277741048463e-05,
      "loss": 0.9971,
      "step": 1305
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.3796695943809063e-05,
      "loss": 0.9947,
      "step": 1310
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.3700664142759521e-05,
      "loss": 1.0532,
      "step": 1315
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.3604685970988817e-05,
      "loss": 0.9791,
      "step": 1320
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.3508765389936272e-05,
      "loss": 1.0299,
      "step": 1325
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.341290635866416e-05,
      "loss": 1.036,
      "step": 1330
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.3317112833694344e-05,
      "loss": 1.0534,
      "step": 1335
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.3221388768844937e-05,
      "loss": 1.0144,
      "step": 1340
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.3125738115067159e-05,
      "loss": 1.0696,
      "step": 1345
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.30301648202822e-05,
      "loss": 1.0651,
      "step": 1350
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.2934672829218327e-05,
      "loss": 1.0598,
      "step": 1355
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.283926608324804e-05,
      "loss": 1.0514,
      "step": 1360
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.2743948520225391e-05,
      "loss": 0.9963,
      "step": 1365
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.2648724074323492e-05,
      "loss": 1.0307,
      "step": 1370
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.255359667587206e-05,
      "loss": 1.0354,
      "step": 1375
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.2458570251195279e-05,
      "loss": 1.0965,
      "step": 1380
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.2363648722449679e-05,
      "loss": 1.0484,
      "step": 1385
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.2268836007462284e-05,
      "loss": 1.0421,
      "step": 1390
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.21741360195689e-05,
      "loss": 1.0695,
      "step": 1395
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.207955266745258e-05,
      "loss": 1.0323,
      "step": 1400
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.198508985498233e-05,
      "loss": 1.0874,
      "step": 1405
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.1890751481051933e-05,
      "loss": 1.0702,
      "step": 1410
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.179654143941907e-05,
      "loss": 1.0211,
      "step": 1415
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.1702463618544562e-05,
      "loss": 1.0252,
      "step": 1420
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.1608521901431932e-05,
      "loss": 1.0588,
      "step": 1425
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.1514720165467076e-05,
      "loss": 1.0644,
      "step": 1430
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.1421062282258276e-05,
      "loss": 1.0831,
      "step": 1435
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.1327552117476363e-05,
      "loss": 1.0262,
      "step": 1440
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.1234193530695189e-05,
      "loss": 1.0775,
      "step": 1445
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.1140990375232336e-05,
      "loss": 1.0476,
      "step": 1450
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.1047946497990016e-05,
      "loss": 1.0373,
      "step": 1455
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.0955065739296378e-05,
      "loss": 1.0734,
      "step": 1460
    }
  ],
  "max_steps": 2470,
  "num_train_epochs": 2,
  "total_flos": 3.470813417302917e+17,
  "trial_name": null,
  "trial_params": null
}