|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 1340,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.007462686567164179,
      "grad_norm": 138.84262084960938,
      "learning_rate": 1.4925373134328358e-06,
      "loss": 2.7919,
      "step": 1
    },
    {
      "epoch": 0.03731343283582089,
      "grad_norm": 108.88359832763672,
      "learning_rate": 7.4626865671641785e-06,
      "loss": 2.7342,
      "step": 5
    },
    {
      "epoch": 0.07462686567164178,
      "grad_norm": 13.541873931884766,
      "learning_rate": 1.4925373134328357e-05,
      "loss": 2.3501,
      "step": 10
    },
    {
      "epoch": 0.11194029850746269,
      "grad_norm": 7.267947673797607,
      "learning_rate": 2.238805970149254e-05,
      "loss": 1.903,
      "step": 15
    },
    {
      "epoch": 0.14925373134328357,
      "grad_norm": 4.06025505065918,
      "learning_rate": 2.9850746268656714e-05,
      "loss": 1.5868,
      "step": 20
    },
    {
      "epoch": 0.1865671641791045,
      "grad_norm": 1.833858847618103,
      "learning_rate": 3.73134328358209e-05,
      "loss": 1.3748,
      "step": 25
    },
    {
      "epoch": 0.22388059701492538,
      "grad_norm": 1.4598793983459473,
      "learning_rate": 4.477611940298508e-05,
      "loss": 1.2384,
      "step": 30
    },
    {
      "epoch": 0.26119402985074625,
      "grad_norm": 1.4846118688583374,
      "learning_rate": 5.223880597014925e-05,
      "loss": 1.149,
      "step": 35
    },
    {
      "epoch": 0.29850746268656714,
      "grad_norm": 0.9875138998031616,
      "learning_rate": 5.970149253731343e-05,
      "loss": 1.0731,
      "step": 40
    },
    {
      "epoch": 0.3358208955223881,
      "grad_norm": 1.0995197296142578,
      "learning_rate": 6.716417910447762e-05,
      "loss": 1.0446,
      "step": 45
    },
    {
      "epoch": 0.373134328358209,
      "grad_norm": 0.6559099555015564,
      "learning_rate": 7.46268656716418e-05,
      "loss": 1.014,
      "step": 50
    },
    {
      "epoch": 0.41044776119402987,
      "grad_norm": 1.1354745626449585,
      "learning_rate": 8.208955223880597e-05,
      "loss": 0.994,
      "step": 55
    },
    {
      "epoch": 0.44776119402985076,
      "grad_norm": 0.49952608346939087,
      "learning_rate": 8.955223880597016e-05,
      "loss": 0.9708,
      "step": 60
    },
    {
      "epoch": 0.48507462686567165,
      "grad_norm": 0.8550089597702026,
      "learning_rate": 9.701492537313434e-05,
      "loss": 0.9634,
      "step": 65
    },
    {
      "epoch": 0.5223880597014925,
      "grad_norm": 0.5180774927139282,
      "learning_rate": 0.0001044776119402985,
      "loss": 0.951,
      "step": 70
    },
    {
      "epoch": 0.5597014925373134,
      "grad_norm": 0.6518343687057495,
      "learning_rate": 0.00011194029850746269,
      "loss": 0.9374,
      "step": 75
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 0.9855380654335022,
      "learning_rate": 0.00011940298507462686,
      "loss": 0.9345,
      "step": 80
    },
    {
      "epoch": 0.6343283582089553,
      "grad_norm": 1.1707885265350342,
      "learning_rate": 0.00012686567164179105,
      "loss": 0.9269,
      "step": 85
    },
    {
      "epoch": 0.6716417910447762,
      "grad_norm": 0.527793824672699,
      "learning_rate": 0.00013432835820895525,
      "loss": 0.9095,
      "step": 90
    },
    {
      "epoch": 0.7089552238805971,
      "grad_norm": 0.678547203540802,
      "learning_rate": 0.00014179104477611942,
      "loss": 0.9077,
      "step": 95
    },
    {
      "epoch": 0.746268656716418,
      "grad_norm": 1.1332663297653198,
      "learning_rate": 0.0001492537313432836,
      "loss": 0.8974,
      "step": 100
    },
    {
      "epoch": 0.7835820895522388,
      "grad_norm": 0.5298197865486145,
      "learning_rate": 0.00015671641791044778,
      "loss": 0.9071,
      "step": 105
    },
    {
      "epoch": 0.8208955223880597,
      "grad_norm": 0.5773002505302429,
      "learning_rate": 0.00016417910447761195,
      "loss": 0.8858,
      "step": 110
    },
    {
      "epoch": 0.8582089552238806,
      "grad_norm": 0.6888121962547302,
      "learning_rate": 0.00017164179104477614,
      "loss": 0.8891,
      "step": 115
    },
    {
      "epoch": 0.8955223880597015,
      "grad_norm": 0.550254762172699,
      "learning_rate": 0.0001791044776119403,
      "loss": 0.8757,
      "step": 120
    },
    {
      "epoch": 0.9328358208955224,
      "grad_norm": 0.5638606548309326,
      "learning_rate": 0.00018656716417910448,
      "loss": 0.872,
      "step": 125
    },
    {
      "epoch": 0.9701492537313433,
      "grad_norm": 0.826892614364624,
      "learning_rate": 0.00019402985074626867,
      "loss": 0.8763,
      "step": 130
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.4227064847946167,
      "eval_runtime": 0.5192,
      "eval_samples_per_second": 3.852,
      "eval_steps_per_second": 1.926,
      "step": 134
    },
    {
      "epoch": 1.007462686567164,
      "grad_norm": 0.6830571293830872,
      "learning_rate": 0.00019999966070700876,
      "loss": 0.8669,
      "step": 135
    },
    {
      "epoch": 1.044776119402985,
      "grad_norm": 0.6296478509902954,
      "learning_rate": 0.0001999877856940653,
      "loss": 0.8547,
      "step": 140
    },
    {
      "epoch": 1.0820895522388059,
      "grad_norm": 0.7137119770050049,
      "learning_rate": 0.00019995894833388465,
      "loss": 0.8498,
      "step": 145
    },
    {
      "epoch": 1.1194029850746268,
      "grad_norm": 0.44721338152885437,
      "learning_rate": 0.00019991315351855748,
      "loss": 0.848,
      "step": 150
    },
    {
      "epoch": 1.1567164179104479,
      "grad_norm": 0.4353409707546234,
      "learning_rate": 0.0001998504090169083,
      "loss": 0.8289,
      "step": 155
    },
    {
      "epoch": 1.1940298507462686,
      "grad_norm": 0.4284411668777466,
      "learning_rate": 0.0001997707254731775,
      "loss": 0.8298,
      "step": 160
    },
    {
      "epoch": 1.2313432835820897,
      "grad_norm": 0.49997657537460327,
      "learning_rate": 0.00019967411640521552,
      "loss": 0.8305,
      "step": 165
    },
    {
      "epoch": 1.2686567164179103,
      "grad_norm": 0.5145930647850037,
      "learning_rate": 0.00019956059820218982,
      "loss": 0.8165,
      "step": 170
    },
    {
      "epoch": 1.3059701492537314,
      "grad_norm": 0.9191280007362366,
      "learning_rate": 0.0001994301901218043,
      "loss": 0.8228,
      "step": 175
    },
    {
      "epoch": 1.3432835820895521,
      "grad_norm": 0.4731077551841736,
      "learning_rate": 0.00019928291428703262,
      "loss": 0.8218,
      "step": 180
    },
    {
      "epoch": 1.3805970149253732,
      "grad_norm": 0.4273313879966736,
      "learning_rate": 0.0001991187956823649,
      "loss": 0.8277,
      "step": 185
    },
    {
      "epoch": 1.417910447761194,
      "grad_norm": 0.512370228767395,
      "learning_rate": 0.00019893786214956945,
      "loss": 0.8075,
      "step": 190
    },
    {
      "epoch": 1.455223880597015,
      "grad_norm": 0.6519188284873962,
      "learning_rate": 0.00019874014438296946,
      "loss": 0.8178,
      "step": 195
    },
    {
      "epoch": 1.4925373134328357,
      "grad_norm": 0.5379275679588318,
      "learning_rate": 0.0001985256759242359,
      "loss": 0.8134,
      "step": 200
    },
    {
      "epoch": 1.5298507462686568,
      "grad_norm": 0.5376815795898438,
      "learning_rate": 0.00019829449315669748,
      "loss": 0.8005,
      "step": 205
    },
    {
      "epoch": 1.5671641791044775,
      "grad_norm": 0.5267529487609863,
      "learning_rate": 0.00019804663529916826,
      "loss": 0.8021,
      "step": 210
    },
    {
      "epoch": 1.6044776119402986,
      "grad_norm": 0.582000732421875,
      "learning_rate": 0.00019778214439929452,
      "loss": 0.8107,
      "step": 215
    },
    {
      "epoch": 1.6417910447761193,
      "grad_norm": 0.4234386086463928,
      "learning_rate": 0.0001975010653264216,
      "loss": 0.7975,
      "step": 220
    },
    {
      "epoch": 1.6791044776119404,
      "grad_norm": 0.4661960005760193,
      "learning_rate": 0.00019720344576398203,
      "loss": 0.7991,
      "step": 225
    },
    {
      "epoch": 1.716417910447761,
      "grad_norm": 0.66214519739151,
      "learning_rate": 0.00019688933620140637,
      "loss": 0.7966,
      "step": 230
    },
    {
      "epoch": 1.7537313432835822,
      "grad_norm": 0.4201449453830719,
      "learning_rate": 0.000196558789925558,
      "loss": 0.8005,
      "step": 235
    },
    {
      "epoch": 1.7910447761194028,
      "grad_norm": 0.4008370637893677,
      "learning_rate": 0.00019621186301169315,
      "loss": 0.7921,
      "step": 240
    },
    {
      "epoch": 1.828358208955224,
      "grad_norm": 0.4507235884666443,
      "learning_rate": 0.00019584861431394822,
      "loss": 0.7956,
      "step": 245
    },
    {
      "epoch": 1.8656716417910446,
      "grad_norm": 0.46443891525268555,
      "learning_rate": 0.00019546910545535558,
      "loss": 0.7932,
      "step": 250
    },
    {
      "epoch": 1.9029850746268657,
      "grad_norm": 0.5451995134353638,
      "learning_rate": 0.0001950734008173893,
      "loss": 0.7889,
      "step": 255
    },
    {
      "epoch": 1.9402985074626866,
      "grad_norm": 0.507540762424469,
      "learning_rate": 0.00019466156752904343,
      "loss": 0.7794,
      "step": 260
    },
    {
      "epoch": 1.9776119402985075,
      "grad_norm": 0.4746926724910736,
      "learning_rate": 0.00019423367545544395,
      "loss": 0.7756,
      "step": 265
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.439745306968689,
      "eval_runtime": 0.5176,
      "eval_samples_per_second": 3.864,
      "eval_steps_per_second": 1.932,
      "step": 268
    },
    {
      "epoch": 2.014925373134328,
      "grad_norm": 0.6268861889839172,
      "learning_rate": 0.00019378979718599645,
      "loss": 0.7882,
      "step": 270
    },
    {
      "epoch": 2.0522388059701493,
      "grad_norm": 0.549802839756012,
      "learning_rate": 0.0001933300080220719,
      "loss": 0.7609,
      "step": 275
    },
    {
      "epoch": 2.08955223880597,
      "grad_norm": 0.6178936958312988,
      "learning_rate": 0.00019285438596423204,
      "loss": 0.7603,
      "step": 280
    },
    {
      "epoch": 2.126865671641791,
      "grad_norm": 0.41491350531578064,
      "learning_rate": 0.00019236301169899713,
      "loss": 0.7554,
      "step": 285
    },
    {
      "epoch": 2.1641791044776117,
      "grad_norm": 0.5220763683319092,
      "learning_rate": 0.000191855968585158,
      "loss": 0.759,
      "step": 290
    },
    {
      "epoch": 2.201492537313433,
      "grad_norm": 0.5099439024925232,
      "learning_rate": 0.0001913333426396346,
      "loss": 0.7614,
      "step": 295
    },
    {
      "epoch": 2.2388059701492535,
      "grad_norm": 0.5702708959579468,
      "learning_rate": 0.00019079522252288386,
      "loss": 0.7485,
      "step": 300
    },
    {
      "epoch": 2.2761194029850746,
      "grad_norm": 0.4422476887702942,
      "learning_rate": 0.00019024169952385885,
      "loss": 0.76,
      "step": 305
    },
    {
      "epoch": 2.3134328358208958,
      "grad_norm": 0.44903138279914856,
      "learning_rate": 0.00018967286754452214,
      "loss": 0.7632,
      "step": 310
    },
    {
      "epoch": 2.3507462686567164,
      "grad_norm": 0.49397289752960205,
      "learning_rate": 0.00018908882308391598,
      "loss": 0.7568,
      "step": 315
    },
    {
      "epoch": 2.388059701492537,
      "grad_norm": 0.4179913103580475,
      "learning_rate": 0.00018848966522179168,
      "loss": 0.7596,
      "step": 320
    },
    {
      "epoch": 2.425373134328358,
      "grad_norm": 0.4103606045246124,
      "learning_rate": 0.0001878754956018014,
      "loss": 0.7487,
      "step": 325
    },
    {
      "epoch": 2.4626865671641793,
      "grad_norm": 0.41152262687683105,
      "learning_rate": 0.00018724641841425478,
      "loss": 0.7565,
      "step": 330
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.4100894629955292,
      "learning_rate": 0.00018660254037844388,
      "loss": 0.7538,
      "step": 335
    },
    {
      "epoch": 2.5373134328358207,
      "grad_norm": 0.3992711901664734,
      "learning_rate": 0.00018594397072453856,
      "loss": 0.7524,
      "step": 340
    },
    {
      "epoch": 2.574626865671642,
      "grad_norm": 0.42304080724716187,
      "learning_rate": 0.0001852708211750564,
      "loss": 0.7533,
      "step": 345
    },
    {
      "epoch": 2.611940298507463,
      "grad_norm": 0.4376600384712219,
      "learning_rate": 0.00018458320592590975,
      "loss": 0.7635,
      "step": 350
    },
    {
      "epoch": 2.6492537313432836,
      "grad_norm": 0.39463093876838684,
      "learning_rate": 0.00018388124162703275,
      "loss": 0.7438,
      "step": 355
    },
    {
      "epoch": 2.6865671641791042,
      "grad_norm": 0.3841034770011902,
      "learning_rate": 0.00018316504736259255,
      "loss": 0.759,
      "step": 360
    },
    {
      "epoch": 2.7238805970149254,
      "grad_norm": 0.43507230281829834,
      "learning_rate": 0.00018243474463078735,
      "loss": 0.7477,
      "step": 365
    },
    {
      "epoch": 2.7611940298507465,
      "grad_norm": 0.41791486740112305,
      "learning_rate": 0.00018169045732323492,
      "loss": 0.7463,
      "step": 370
    },
    {
      "epoch": 2.798507462686567,
      "grad_norm": 0.4145572781562805,
      "learning_rate": 0.00018093231170395507,
      "loss": 0.7438,
      "step": 375
    },
    {
      "epoch": 2.835820895522388,
      "grad_norm": 0.39668190479278564,
      "learning_rate": 0.00018016043638794974,
      "loss": 0.7545,
      "step": 380
    },
    {
      "epoch": 2.873134328358209,
      "grad_norm": 0.36246854066848755,
      "learning_rate": 0.0001793749623193842,
      "loss": 0.7483,
      "step": 385
    },
    {
      "epoch": 2.91044776119403,
      "grad_norm": 0.4155827462673187,
      "learning_rate": 0.00017857602274937308,
      "loss": 0.7499,
      "step": 390
    },
    {
      "epoch": 2.9477611940298507,
      "grad_norm": 0.3993385136127472,
      "learning_rate": 0.00017776375321337521,
      "loss": 0.7402,
      "step": 395
    },
    {
      "epoch": 2.9850746268656714,
      "grad_norm": 0.40633976459503174,
      "learning_rate": 0.00017693829150820068,
      "loss": 0.7387,
      "step": 400
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.440061092376709,
      "eval_runtime": 0.5179,
      "eval_samples_per_second": 3.862,
      "eval_steps_per_second": 1.931,
      "step": 402
    },
    {
      "epoch": 3.0223880597014925,
      "grad_norm": 0.43004971742630005,
      "learning_rate": 0.00017609977766863458,
      "loss": 0.7429,
      "step": 405
    },
    {
      "epoch": 3.0597014925373136,
      "grad_norm": 0.4013059735298157,
      "learning_rate": 0.00017524835394368065,
      "loss": 0.7087,
      "step": 410
    },
    {
      "epoch": 3.0970149253731343,
      "grad_norm": 0.4613398313522339,
      "learning_rate": 0.0001743841647724299,
      "loss": 0.7234,
      "step": 415
    },
    {
      "epoch": 3.1343283582089554,
      "grad_norm": 0.3847793936729431,
      "learning_rate": 0.00017350735675955697,
      "loss": 0.7214,
      "step": 420
    },
    {
      "epoch": 3.171641791044776,
      "grad_norm": 0.4573831558227539,
      "learning_rate": 0.00017261807865044993,
      "loss": 0.7228,
      "step": 425
    },
    {
      "epoch": 3.208955223880597,
      "grad_norm": 0.4393455684185028,
      "learning_rate": 0.00017171648130597612,
      "loss": 0.7202,
      "step": 430
    },
    {
      "epoch": 3.246268656716418,
      "grad_norm": 0.45842474699020386,
      "learning_rate": 0.00017080271767688963,
      "loss": 0.7098,
      "step": 435
    },
    {
      "epoch": 3.283582089552239,
      "grad_norm": 0.46594393253326416,
      "learning_rate": 0.00016987694277788417,
      "loss": 0.7159,
      "step": 440
    },
    {
      "epoch": 3.3208955223880596,
      "grad_norm": 0.40664443373680115,
      "learning_rate": 0.00016893931366129562,
      "loss": 0.7128,
      "step": 445
    },
    {
      "epoch": 3.3582089552238807,
      "grad_norm": 0.38857197761535645,
      "learning_rate": 0.00016798998939045895,
      "loss": 0.7223,
      "step": 450
    },
    {
      "epoch": 3.3955223880597014,
      "grad_norm": 0.443276047706604,
      "learning_rate": 0.0001670291310127242,
      "loss": 0.7132,
      "step": 455
    },
    {
      "epoch": 3.4328358208955225,
      "grad_norm": 0.3902607858181,
      "learning_rate": 0.0001660569015321357,
      "loss": 0.7139,
      "step": 460
    },
    {
      "epoch": 3.470149253731343,
      "grad_norm": 0.3990344703197479,
      "learning_rate": 0.00016507346588177935,
      "loss": 0.7191,
      "step": 465
    },
    {
      "epoch": 3.5074626865671643,
      "grad_norm": 0.4015812277793884,
      "learning_rate": 0.00016407899089580262,
      "loss": 0.7146,
      "step": 470
    },
    {
      "epoch": 3.544776119402985,
      "grad_norm": 0.44737905263900757,
      "learning_rate": 0.0001630736452811122,
      "loss": 0.7182,
      "step": 475
    },
    {
      "epoch": 3.582089552238806,
      "grad_norm": 0.4523073136806488,
      "learning_rate": 0.0001620575995887538,
      "loss": 0.7212,
      "step": 480
    },
    {
      "epoch": 3.6194029850746268,
      "grad_norm": 0.4242749512195587,
      "learning_rate": 0.00016103102618497922,
      "loss": 0.7168,
      "step": 485
    },
    {
      "epoch": 3.656716417910448,
      "grad_norm": 0.4483940899372101,
      "learning_rate": 0.0001599940992220053,
      "loss": 0.7145,
      "step": 490
    },
    {
      "epoch": 3.6940298507462686,
      "grad_norm": 0.4250345826148987,
      "learning_rate": 0.00015894699460847016,
      "loss": 0.6993,
      "step": 495
    },
    {
      "epoch": 3.7313432835820897,
      "grad_norm": 0.38691166043281555,
      "learning_rate": 0.00015788988997959114,
      "loss": 0.7135,
      "step": 500
    },
    {
      "epoch": 3.7686567164179103,
      "grad_norm": 0.4665898382663727,
      "learning_rate": 0.00015682296466703024,
      "loss": 0.7226,
      "step": 505
    },
    {
      "epoch": 3.8059701492537314,
      "grad_norm": 0.501005232334137,
      "learning_rate": 0.00015574639966847126,
      "loss": 0.7189,
      "step": 510
    },
    {
      "epoch": 3.843283582089552,
      "grad_norm": 0.3910924792289734,
      "learning_rate": 0.0001546603776169149,
      "loss": 0.7208,
      "step": 515
    },
    {
      "epoch": 3.8805970149253732,
      "grad_norm": 0.4152643382549286,
      "learning_rate": 0.00015356508274969594,
      "loss": 0.7248,
      "step": 520
    },
    {
      "epoch": 3.917910447761194,
      "grad_norm": 0.4302676022052765,
      "learning_rate": 0.00015246070087722845,
      "loss": 0.7169,
      "step": 525
    },
    {
      "epoch": 3.955223880597015,
      "grad_norm": 0.39138561487197876,
      "learning_rate": 0.0001513474193514842,
      "loss": 0.7243,
      "step": 530
    },
    {
      "epoch": 3.9925373134328357,
      "grad_norm": 0.4384176731109619,
      "learning_rate": 0.0001502254270342095,
      "loss": 0.7128,
      "step": 535
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.4540088176727295,
      "eval_runtime": 0.518,
      "eval_samples_per_second": 3.861,
      "eval_steps_per_second": 1.93,
      "step": 536
    },
    {
      "epoch": 4.029850746268656,
      "grad_norm": 0.4122828543186188,
      "learning_rate": 0.00014909491426488578,
      "loss": 0.6889,
      "step": 540
    },
    {
      "epoch": 4.067164179104478,
      "grad_norm": 0.4475855827331543,
      "learning_rate": 0.0001479560728284398,
      "loss": 0.6847,
      "step": 545
    },
    {
      "epoch": 4.104477611940299,
      "grad_norm": 0.47523927688598633,
      "learning_rate": 0.0001468090959227082,
      "loss": 0.6887,
      "step": 550
    },
    {
      "epoch": 4.141791044776119,
      "grad_norm": 0.39732715487480164,
      "learning_rate": 0.00014565417812566285,
      "loss": 0.6877,
      "step": 555
    },
    {
      "epoch": 4.17910447761194,
      "grad_norm": 0.4284641444683075,
      "learning_rate": 0.00014449151536240166,
      "loss": 0.6932,
      "step": 560
    },
    {
      "epoch": 4.2164179104477615,
      "grad_norm": 0.43158912658691406,
      "learning_rate": 0.0001433213048719111,
      "loss": 0.6893,
      "step": 565
    },
    {
      "epoch": 4.253731343283582,
      "grad_norm": 0.4248037338256836,
      "learning_rate": 0.00014214374517360575,
      "loss": 0.6846,
      "step": 570
    },
    {
      "epoch": 4.291044776119403,
      "grad_norm": 0.4019497334957123,
      "learning_rate": 0.00014095903603365066,
      "loss": 0.6872,
      "step": 575
    },
    {
      "epoch": 4.3283582089552235,
      "grad_norm": 0.42748814821243286,
      "learning_rate": 0.00013976737843107202,
      "loss": 0.6876,
      "step": 580
    },
    {
      "epoch": 4.365671641791045,
      "grad_norm": 0.4337392747402191,
      "learning_rate": 0.00013856897452366242,
      "loss": 0.6981,
      "step": 585
    },
    {
      "epoch": 4.402985074626866,
      "grad_norm": 0.4018155634403229,
      "learning_rate": 0.00013736402761368598,
      "loss": 0.6872,
      "step": 590
    },
    {
      "epoch": 4.440298507462686,
      "grad_norm": 0.40716463327407837,
      "learning_rate": 0.00013615274211338909,
      "loss": 0.6874,
      "step": 595
    },
    {
      "epoch": 4.477611940298507,
      "grad_norm": 0.4064732491970062,
      "learning_rate": 0.0001349353235103232,
      "loss": 0.69,
      "step": 600
    },
    {
      "epoch": 4.514925373134329,
      "grad_norm": 0.38808923959732056,
      "learning_rate": 0.00013371197833248507,
      "loss": 0.6906,
      "step": 605
    },
    {
      "epoch": 4.552238805970149,
      "grad_norm": 0.4051991105079651,
      "learning_rate": 0.00013248291411328047,
      "loss": 0.6898,
      "step": 610
    },
    {
      "epoch": 4.58955223880597,
      "grad_norm": 0.4059875011444092,
      "learning_rate": 0.00013124833935631726,
      "loss": 0.688,
      "step": 615
    },
    {
      "epoch": 4.6268656716417915,
      "grad_norm": 0.4350437521934509,
      "learning_rate": 0.0001300084635000341,
      "loss": 0.6868,
      "step": 620
    },
    {
      "epoch": 4.664179104477612,
      "grad_norm": 0.43591079115867615,
      "learning_rate": 0.0001287634968821703,
      "loss": 0.6843,
      "step": 625
    },
    {
      "epoch": 4.701492537313433,
      "grad_norm": 0.3807081878185272,
      "learning_rate": 0.00012751365070408333,
      "loss": 0.6792,
      "step": 630
    },
    {
      "epoch": 4.7388059701492535,
      "grad_norm": 0.3958300054073334,
      "learning_rate": 0.00012625913699491984,
      "loss": 0.6896,
      "step": 635
    },
    {
      "epoch": 4.776119402985074,
      "grad_norm": 0.4042377471923828,
      "learning_rate": 0.00012500016857564585,
      "loss": 0.6875,
      "step": 640
    },
    {
      "epoch": 4.813432835820896,
      "grad_norm": 0.40110722184181213,
      "learning_rate": 0.00012373695902294314,
      "loss": 0.6923,
      "step": 645
    },
    {
      "epoch": 4.850746268656716,
      "grad_norm": 0.4298909604549408,
      "learning_rate": 0.0001224697226329772,
      "loss": 0.6975,
      "step": 650
    },
    {
      "epoch": 4.888059701492537,
      "grad_norm": 0.4291529655456543,
      "learning_rate": 0.00012119867438504302,
      "loss": 0.6908,
      "step": 655
    },
    {
      "epoch": 4.925373134328359,
      "grad_norm": 0.4685961902141571,
      "learning_rate": 0.00011992402990509515,
      "loss": 0.6836,
      "step": 660
    },
    {
      "epoch": 4.962686567164179,
      "grad_norm": 0.4041878879070282,
      "learning_rate": 0.00011864600542916813,
      "loss": 0.6921,
      "step": 665
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.3824089467525482,
      "learning_rate": 0.00011736481776669306,
      "loss": 0.6837,
      "step": 670
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.465812087059021,
      "eval_runtime": 0.5174,
      "eval_samples_per_second": 3.866,
      "eval_steps_per_second": 1.933,
      "step": 670
    },
    {
      "epoch": 5.037313432835821,
      "grad_norm": 0.4383637309074402,
      "learning_rate": 0.0001160806842637173,
      "loss": 0.6631,
      "step": 675
    },
    {
      "epoch": 5.074626865671641,
      "grad_norm": 0.40598100423812866,
      "learning_rate": 0.000114793822766033,
      "loss": 0.6596,
      "step": 680
    },
    {
      "epoch": 5.111940298507463,
      "grad_norm": 0.413703590631485,
      "learning_rate": 0.00011350445158222074,
      "loss": 0.6631,
      "step": 685
    },
    {
      "epoch": 5.149253731343284,
      "grad_norm": 0.44605740904808044,
      "learning_rate": 0.00011221278944661473,
      "loss": 0.6463,
      "step": 690
    },
    {
      "epoch": 5.186567164179104,
      "grad_norm": 0.43800660967826843,
      "learning_rate": 0.00011091905548219597,
      "loss": 0.6662,
      "step": 695
    },
    {
      "epoch": 5.223880597014926,
      "grad_norm": 0.4596647620201111,
      "learning_rate": 0.00010962346916341903,
      "loss": 0.6638,
      "step": 700
    },
    {
      "epoch": 5.2611940298507465,
      "grad_norm": 0.4407581090927124,
      "learning_rate": 0.00010832625027897978,
      "loss": 0.6644,
      "step": 705
    },
    {
      "epoch": 5.298507462686567,
      "grad_norm": 0.41823288798332214,
      "learning_rate": 0.0001070276188945293,
      "loss": 0.6565,
      "step": 710
    },
    {
      "epoch": 5.335820895522388,
      "grad_norm": 0.42799797654151917,
      "learning_rate": 0.00010572779531534112,
      "loss": 0.6555,
      "step": 715
    },
    {
      "epoch": 5.373134328358209,
      "grad_norm": 0.3943883776664734,
      "learning_rate": 0.00010442700004893764,
      "loss": 0.6572,
      "step": 720
    },
    {
      "epoch": 5.41044776119403,
      "grad_norm": 0.40598776936531067,
      "learning_rate": 0.00010312545376768246,
      "loss": 0.6564,
      "step": 725
    },
    {
      "epoch": 5.447761194029851,
      "grad_norm": 0.42347079515457153,
      "learning_rate": 0.0001018233772713443,
      "loss": 0.6677,
      "step": 730
    },
    {
      "epoch": 5.485074626865671,
      "grad_norm": 0.4139921963214874,
      "learning_rate": 0.00010052099144964004,
      "loss": 0.6701,
      "step": 735
    },
    {
      "epoch": 5.522388059701493,
      "grad_norm": 0.4295801520347595,
      "learning_rate": 9.92185172447616e-05,
      "loss": 0.6605,
      "step": 740
    },
    {
      "epoch": 5.559701492537314,
      "grad_norm": 0.41350170969963074,
      "learning_rate": 9.791617561389475e-05,
      "loss": 0.6671,
      "step": 745
    },
    {
      "epoch": 5.597014925373134,
      "grad_norm": 0.4078296720981598,
      "learning_rate": 9.661418749173467e-05,
      "loss": 0.669,
      "step": 750
    },
    {
      "epoch": 5.634328358208955,
      "grad_norm": 0.4294317066669464,
      "learning_rate": 9.531277375300599e-05,
      "loss": 0.6653,
      "step": 755
    },
    {
      "epoch": 5.6716417910447765,
      "grad_norm": 0.4155528247356415,
      "learning_rate": 9.40121551749925e-05,
      "loss": 0.6587,
      "step": 760
    },
    {
      "epoch": 5.708955223880597,
      "grad_norm": 0.4128773808479309,
      "learning_rate": 9.271255240008356e-05,
      "loss": 0.6572,
      "step": 765
    },
    {
      "epoch": 5.746268656716418,
      "grad_norm": 0.4138401746749878,
      "learning_rate": 9.141418589834339e-05,
      "loss": 0.6619,
      "step": 770
    },
    {
      "epoch": 5.7835820895522385,
      "grad_norm": 0.40413615107536316,
      "learning_rate": 9.011727593010981e-05,
      "loss": 0.6745,
      "step": 775
    },
    {
      "epoch": 5.82089552238806,
      "grad_norm": 0.447427898645401,
      "learning_rate": 8.882204250862796e-05,
      "loss": 0.6625,
      "step": 780
    },
    {
      "epoch": 5.858208955223881,
      "grad_norm": 0.4221794009208679,
      "learning_rate": 8.752870536272673e-05,
      "loss": 0.6743,
      "step": 785
    },
    {
      "epoch": 5.895522388059701,
      "grad_norm": 0.4193105399608612,
      "learning_rate": 8.623748389954283e-05,
      "loss": 0.6666,
      "step": 790
    },
    {
      "epoch": 5.932835820895522,
      "grad_norm": 0.4287867248058319,
      "learning_rate": 8.494859716729962e-05,
      "loss": 0.6676,
      "step": 795
    },
    {
      "epoch": 5.970149253731344,
      "grad_norm": 0.40571707487106323,
      "learning_rate": 8.366226381814697e-05,
      "loss": 0.6691,
      "step": 800
    },
    {
      "epoch": 6.0,
      "eval_loss": 1.4930399656295776,
      "eval_runtime": 0.5186,
      "eval_samples_per_second": 3.856,
      "eval_steps_per_second": 1.928,
      "step": 804
    },
    {
      "epoch": 6.007462686567164,
      "grad_norm": 0.4177990257740021,
      "learning_rate": 8.237870207106821e-05,
      "loss": 0.6566,
      "step": 805
    },
    {
      "epoch": 6.044776119402985,
      "grad_norm": 0.4325125813484192,
      "learning_rate": 8.109812967486025e-05,
      "loss": 0.6271,
      "step": 810
    },
    {
      "epoch": 6.082089552238806,
      "grad_norm": 0.42587417364120483,
      "learning_rate": 7.982076387119421e-05,
      "loss": 0.6384,
      "step": 815
    },
    {
      "epoch": 6.119402985074627,
      "grad_norm": 0.4367959499359131,
      "learning_rate": 7.854682135776131e-05,
      "loss": 0.642,
      "step": 820
    },
    {
      "epoch": 6.156716417910448,
      "grad_norm": 0.43222272396087646,
      "learning_rate": 7.727651825151145e-05,
      "loss": 0.6314,
      "step": 825
    },
    {
      "epoch": 6.1940298507462686,
      "grad_norm": 0.44278955459594727,
      "learning_rate": 7.601007005199021e-05,
      "loss": 0.6389,
      "step": 830
    },
    {
      "epoch": 6.231343283582089,
      "grad_norm": 0.4457733929157257,
      "learning_rate": 7.474769160478079e-05,
      "loss": 0.6411,
      "step": 835
    },
    {
      "epoch": 6.268656716417911,
      "grad_norm": 0.4409390091896057,
      "learning_rate": 7.348959706505626e-05,
      "loss": 0.6381,
      "step": 840
    },
    {
      "epoch": 6.3059701492537314,
      "grad_norm": 0.453755259513855,
      "learning_rate": 7.223599986124994e-05,
      "loss": 0.6458,
      "step": 845
    },
    {
      "epoch": 6.343283582089552,
      "grad_norm": 0.420446515083313,
      "learning_rate": 7.09871126588481e-05,
      "loss": 0.6409,
      "step": 850
    },
    {
      "epoch": 6.380597014925373,
      "grad_norm": 0.4332159757614136,
      "learning_rate": 6.974314732431258e-05,
      "loss": 0.6396,
      "step": 855
    },
    {
      "epoch": 6.417910447761194,
      "grad_norm": 0.44185054302215576,
      "learning_rate": 6.850431488913895e-05,
      "loss": 0.6461,
      "step": 860
    },
    {
      "epoch": 6.455223880597015,
      "grad_norm": 0.4363011121749878,
      "learning_rate": 6.727082551405636e-05,
      "loss": 0.6435,
      "step": 865
    },
    {
      "epoch": 6.492537313432836,
      "grad_norm": 0.426645427942276,
      "learning_rate": 6.604288845337453e-05,
      "loss": 0.6443,
      "step": 870
    },
    {
      "epoch": 6.529850746268656,
      "grad_norm": 0.4306298792362213,
      "learning_rate": 6.482071201948556e-05,
      "loss": 0.6405,
      "step": 875
    },
    {
      "epoch": 6.567164179104478,
      "grad_norm": 0.42580053210258484,
      "learning_rate": 6.360450354752458e-05,
      "loss": 0.636,
      "step": 880
    },
    {
      "epoch": 6.604477611940299,
      "grad_norm": 0.4214308261871338,
      "learning_rate": 6.239446936019657e-05,
      "loss": 0.6411,
      "step": 885
    },
    {
      "epoch": 6.641791044776119,
      "grad_norm": 0.44287511706352234,
      "learning_rate": 6.119081473277501e-05,
      "loss": 0.6427,
      "step": 890
    },
    {
      "epoch": 6.67910447761194,
      "grad_norm": 0.4150986671447754,
      "learning_rate": 5.99937438582782e-05,
      "loss": 0.6472,
      "step": 895
    },
    {
      "epoch": 6.7164179104477615,
      "grad_norm": 0.446879118680954,
      "learning_rate": 5.880345981282876e-05,
      "loss": 0.6408,
      "step": 900
    },
    {
      "epoch": 6.753731343283582,
      "grad_norm": 0.4269532561302185,
      "learning_rate": 5.7620164521203356e-05,
      "loss": 0.6367,
      "step": 905
    },
    {
      "epoch": 6.791044776119403,
      "grad_norm": 0.4269901216030121,
      "learning_rate": 5.6444058722577165e-05,
      "loss": 0.6396,
      "step": 910
    },
    {
      "epoch": 6.8283582089552235,
      "grad_norm": 0.42097654938697815,
      "learning_rate": 5.5275341936469396e-05,
      "loss": 0.6473,
      "step": 915
    },
    {
      "epoch": 6.865671641791045,
      "grad_norm": 0.4283384680747986,
      "learning_rate": 5.4114212428896424e-05,
      "loss": 0.6496,
      "step": 920
    },
    {
      "epoch": 6.902985074626866,
      "grad_norm": 0.4407792389392853,
      "learning_rate": 5.296086717873685e-05,
      "loss": 0.6459,
      "step": 925
    },
    {
      "epoch": 6.940298507462686,
      "grad_norm": 0.43775680661201477,
      "learning_rate": 5.1815501844315105e-05,
      "loss": 0.6414,
      "step": 930
    },
    {
      "epoch": 6.977611940298507,
      "grad_norm": 0.4383552372455597,
      "learning_rate": 5.0678310730209275e-05,
      "loss": 0.6389,
      "step": 935
    },
    {
      "epoch": 7.0,
      "eval_loss": 1.5040441751480103,
      "eval_runtime": 0.5191,
      "eval_samples_per_second": 3.853,
      "eval_steps_per_second": 1.927,
      "step": 938
    },
    {
      "epoch": 7.014925373134329,
      "grad_norm": 0.4299301505088806,
      "learning_rate": 4.954948675428853e-05,
      "loss": 0.6411,
      "step": 940
    },
    {
      "epoch": 7.052238805970149,
      "grad_norm": 0.44037163257598877,
      "learning_rate": 4.84292214149855e-05,
      "loss": 0.6185,
      "step": 945
    },
    {
      "epoch": 7.08955223880597,
      "grad_norm": 0.4394110441207886,
      "learning_rate": 4.7317704758809946e-05,
      "loss": 0.6158,
      "step": 950
    },
    {
      "epoch": 7.126865671641791,
      "grad_norm": 0.441795289516449,
      "learning_rate": 4.6215125348108247e-05,
      "loss": 0.6202,
      "step": 955
    },
    {
      "epoch": 7.164179104477612,
      "grad_norm": 0.44138744473457336,
      "learning_rate": 4.512167022907494e-05,
      "loss": 0.6238,
      "step": 960
    },
    {
      "epoch": 7.201492537313433,
      "grad_norm": 0.453387588262558,
      "learning_rate": 4.40375249000216e-05,
      "loss": 0.6258,
      "step": 965
    },
    {
      "epoch": 7.2388059701492535,
      "grad_norm": 0.44053417444229126,
      "learning_rate": 4.296287327990797e-05,
      "loss": 0.6237,
      "step": 970
    },
    {
      "epoch": 7.276119402985074,
      "grad_norm": 0.44405415654182434,
      "learning_rate": 4.189789767714102e-05,
      "loss": 0.6214,
      "step": 975
    },
    {
      "epoch": 7.313432835820896,
      "grad_norm": 0.4466931223869324,
      "learning_rate": 4.084277875864776e-05,
      "loss": 0.6258,
      "step": 980
    },
    {
      "epoch": 7.350746268656716,
      "grad_norm": 0.45868679881095886,
      "learning_rate": 3.979769551922592e-05,
      "loss": 0.6271,
      "step": 985
    },
    {
      "epoch": 7.388059701492537,
      "grad_norm": 0.44392016530036926,
      "learning_rate": 3.876282525117847e-05,
      "loss": 0.6191,
      "step": 990
    },
    {
      "epoch": 7.425373134328359,
      "grad_norm": 0.4744507670402527,
      "learning_rate": 3.7738343514237106e-05,
      "loss": 0.6291,
      "step": 995
    },
    {
      "epoch": 7.462686567164179,
      "grad_norm": 0.43945175409317017,
      "learning_rate": 3.672442410577965e-05,
      "loss": 0.6305,
      "step": 1000
    },
    {
      "epoch": 7.5,
      "grad_norm": 0.4612315893173218,
      "learning_rate": 3.5721239031346066e-05,
      "loss": 0.6236,
      "step": 1005
    },
    {
      "epoch": 7.537313432835821,
      "grad_norm": 0.45296919345855713,
      "learning_rate": 3.472895847545905e-05,
      "loss": 0.6282,
      "step": 1010
    },
    {
      "epoch": 7.574626865671641,
      "grad_norm": 0.4535280466079712,
      "learning_rate": 3.374775077275293e-05,
      "loss": 0.6249,
      "step": 1015
    },
    {
      "epoch": 7.611940298507463,
      "grad_norm": 0.4502476155757904,
      "learning_rate": 3.27777823794168e-05,
      "loss": 0.6267,
      "step": 1020
    },
    {
      "epoch": 7.649253731343284,
      "grad_norm": 0.4433761239051819,
      "learning_rate": 3.1819217844956214e-05,
      "loss": 0.6344,
      "step": 1025
    },
    {
      "epoch": 7.686567164179104,
      "grad_norm": 0.4449211657047272,
      "learning_rate": 3.0872219784278354e-05,
      "loss": 0.6241,
      "step": 1030
    },
    {
      "epoch": 7.723880597014926,
      "grad_norm": 0.47005486488342285,
      "learning_rate": 2.9936948850105152e-05,
      "loss": 0.6166,
      "step": 1035
    },
    {
      "epoch": 7.7611940298507465,
      "grad_norm": 0.4615201950073242,
      "learning_rate": 2.901356370571967e-05,
      "loss": 0.6269,
      "step": 1040
    },
    {
      "epoch": 7.798507462686567,
      "grad_norm": 0.44223588705062866,
      "learning_rate": 2.8102220998049843e-05,
      "loss": 0.6299,
      "step": 1045
    },
    {
      "epoch": 7.835820895522388,
      "grad_norm": 0.4457671642303467,
      "learning_rate": 2.7203075331094017e-05,
      "loss": 0.627,
      "step": 1050
    },
    {
      "epoch": 7.8731343283582085,
      "grad_norm": 0.4446623623371124,
      "learning_rate": 2.6316279239693465e-05,
      "loss": 0.6195,
      "step": 1055
    },
    {
      "epoch": 7.91044776119403,
      "grad_norm": 0.4469725489616394,
      "learning_rate": 2.54419831636557e-05,
      "loss": 0.612,
      "step": 1060
    },
    {
      "epoch": 7.947761194029851,
      "grad_norm": 0.4488806426525116,
      "learning_rate": 2.458033542223316e-05,
      "loss": 0.6139,
      "step": 1065
    },
    {
      "epoch": 7.985074626865671,
      "grad_norm": 0.44639790058135986,
      "learning_rate": 2.3731482188961818e-05,
      "loss": 0.6291,
      "step": 1070
    },
    {
      "epoch": 8.0,
      "eval_loss": 1.5243113040924072,
      "eval_runtime": 0.5203,
      "eval_samples_per_second": 3.844,
      "eval_steps_per_second": 1.922,
      "step": 1072
    },
    {
      "epoch": 8.022388059701493,
      "grad_norm": 0.43840205669403076,
      "learning_rate": 2.28955674668636e-05,
      "loss": 0.6144,
      "step": 1075
    },
    {
      "epoch": 8.059701492537313,
      "grad_norm": 0.44431746006011963,
      "learning_rate": 2.2072733064017103e-05,
      "loss": 0.6062,
      "step": 1080
    },
    {
      "epoch": 8.097014925373134,
      "grad_norm": 0.44257909059524536,
      "learning_rate": 2.1263118569500795e-05,
      "loss": 0.6067,
      "step": 1085
    },
    {
      "epoch": 8.134328358208956,
      "grad_norm": 0.4406292140483856,
      "learning_rate": 2.0466861329712473e-05,
      "loss": 0.6169,
      "step": 1090
    },
    {
      "epoch": 8.171641791044776,
      "grad_norm": 0.45199882984161377,
      "learning_rate": 1.968409642506913e-05,
      "loss": 0.6133,
      "step": 1095
    },
    {
      "epoch": 8.208955223880597,
      "grad_norm": 0.4403633773326874,
      "learning_rate": 1.89149566470915e-05,
      "loss": 0.6059,
      "step": 1100
    },
    {
      "epoch": 8.246268656716419,
      "grad_norm": 0.45532822608947754,
      "learning_rate": 1.8159572475876718e-05,
      "loss": 0.6042,
      "step": 1105
    },
    {
      "epoch": 8.283582089552239,
      "grad_norm": 0.4559955596923828,
      "learning_rate": 1.741807205796314e-05,
      "loss": 0.6189,
      "step": 1110
    },
    {
      "epoch": 8.32089552238806,
      "grad_norm": 0.45486971735954285,
      "learning_rate": 1.6690581184590858e-05,
      "loss": 0.6116,
      "step": 1115
    },
    {
      "epoch": 8.35820895522388,
      "grad_norm": 0.44671645760536194,
      "learning_rate": 1.5977223270362196e-05,
      "loss": 0.6149,
      "step": 1120
    },
    {
      "epoch": 8.395522388059701,
      "grad_norm": 0.45056024193763733,
      "learning_rate": 1.5278119332305053e-05,
      "loss": 0.614,
      "step": 1125
    },
    {
      "epoch": 8.432835820895523,
      "grad_norm": 0.4756515920162201,
      "learning_rate": 1.459338796934293e-05,
      "loss": 0.6163,
      "step": 1130
    },
    {
      "epoch": 8.470149253731343,
      "grad_norm": 0.4487175941467285,
      "learning_rate": 1.3923145342175482e-05,
      "loss": 0.6098,
      "step": 1135
    },
    {
      "epoch": 8.507462686567164,
      "grad_norm": 0.46132373809814453,
      "learning_rate": 1.3267505153572501e-05,
      "loss": 0.6099,
      "step": 1140
    },
    {
      "epoch": 8.544776119402986,
      "grad_norm": 0.44755882024765015,
      "learning_rate": 1.2626578629084784e-05,
      "loss": 0.6169,
      "step": 1145
    },
    {
      "epoch": 8.582089552238806,
      "grad_norm": 0.46651872992515564,
      "learning_rate": 1.2000474498175552e-05,
      "loss": 0.6099,
      "step": 1150
    },
    {
      "epoch": 8.619402985074627,
      "grad_norm": 0.47415512800216675,
      "learning_rate": 1.138929897577493e-05,
      "loss": 0.6104,
      "step": 1155
    },
    {
      "epoch": 8.656716417910447,
      "grad_norm": 0.4649696350097656,
      "learning_rate": 1.0793155744261351e-05,
      "loss": 0.6094,
      "step": 1160
    },
    {
      "epoch": 8.694029850746269,
      "grad_norm": 0.44566088914871216,
      "learning_rate": 1.0212145935872375e-05,
      "loss": 0.6223,
      "step": 1165
    },
    {
      "epoch": 8.73134328358209,
      "grad_norm": 0.4482034742832184,
      "learning_rate": 9.646368115548232e-06,
      "loss": 0.6064,
      "step": 1170
    },
    {
      "epoch": 8.76865671641791,
      "grad_norm": 0.4628354012966156,
      "learning_rate": 9.095918264210779e-06,
      "loss": 0.6139,
      "step": 1175
    },
    {
      "epoch": 8.805970149253731,
      "grad_norm": 0.4561688303947449,
      "learning_rate": 8.56088976248095e-06,
      "loss": 0.6141,
      "step": 1180
    },
    {
      "epoch": 8.843283582089553,
      "grad_norm": 0.4501047730445862,
      "learning_rate": 8.041373374837302e-06,
      "loss": 0.6184,
      "step": 1185
    },
    {
      "epoch": 8.880597014925373,
      "grad_norm": 0.4514229893684387,
      "learning_rate": 7.53745723421827e-06,
      "loss": 0.6069,
      "step": 1190
    },
    {
      "epoch": 8.917910447761194,
      "grad_norm": 0.4529721438884735,
      "learning_rate": 7.0492268270709875e-06,
      "loss": 0.6136,
      "step": 1195
    },
    {
      "epoch": 8.955223880597014,
      "grad_norm": 0.4562200605869293,
      "learning_rate": 6.576764978849004e-06,
      "loss": 0.6053,
      "step": 1200
    },
    {
      "epoch": 8.992537313432836,
      "grad_norm": 0.455243319272995,
      "learning_rate": 6.1201518399613635e-06,
      "loss": 0.6197,
      "step": 1205
    },
    {
      "epoch": 9.0,
      "eval_loss": 1.5375313758850098,
      "eval_runtime": 0.5174,
      "eval_samples_per_second": 3.865,
      "eval_steps_per_second": 1.933,
      "step": 1206
    },
    {
      "epoch": 9.029850746268657,
      "grad_norm": 0.44451919198036194,
      "learning_rate": 5.679464872175666e-06,
      "loss": 0.6126,
      "step": 1210
    },
    {
      "epoch": 9.067164179104477,
      "grad_norm": 0.4510916471481323,
      "learning_rate": 5.254778835477015e-06,
      "loss": 0.619,
      "step": 1215
    },
    {
      "epoch": 9.104477611940299,
      "grad_norm": 0.4504685699939728,
      "learning_rate": 4.846165775385459e-06,
      "loss": 0.6059,
      "step": 1220
    },
    {
      "epoch": 9.14179104477612,
      "grad_norm": 0.4434657692909241,
      "learning_rate": 4.4536950107339605e-06,
      "loss": 0.601,
      "step": 1225
    },
    {
      "epoch": 9.17910447761194,
      "grad_norm": 0.46363726258277893,
      "learning_rate": 4.077433121908747e-06,
      "loss": 0.6165,
      "step": 1230
    },
    {
      "epoch": 9.216417910447761,
      "grad_norm": 0.45644015073776245,
      "learning_rate": 3.717443939554388e-06,
      "loss": 0.6102,
      "step": 1235
    },
    {
      "epoch": 9.253731343283581,
      "grad_norm": 0.45389410853385925,
      "learning_rate": 3.3737885337452814e-06,
      "loss": 0.6029,
      "step": 1240
    },
    {
      "epoch": 9.291044776119403,
      "grad_norm": 0.4506882131099701,
      "learning_rate": 3.0465252036255165e-06,
      "loss": 0.6091,
      "step": 1245
    },
    {
      "epoch": 9.328358208955224,
      "grad_norm": 0.44706588983535767,
      "learning_rate": 2.735709467518699e-06,
      "loss": 0.6114,
      "step": 1250
    },
    {
      "epoch": 9.365671641791044,
      "grad_norm": 0.45065855979919434,
      "learning_rate": 2.4413940535096156e-06,
      "loss": 0.603,
      "step": 1255
    },
    {
      "epoch": 9.402985074626866,
      "grad_norm": 0.4460483491420746,
      "learning_rate": 2.1636288904992585e-06,
      "loss": 0.6019,
      "step": 1260
    },
    {
      "epoch": 9.440298507462687,
      "grad_norm": 0.4578142762184143,
      "learning_rate": 1.902461099734587e-06,
      "loss": 0.6065,
      "step": 1265
    },
    {
      "epoch": 9.477611940298507,
      "grad_norm": 0.46080294251441956,
      "learning_rate": 1.6579349868147687e-06,
      "loss": 0.5979,
      "step": 1270
    },
    {
      "epoch": 9.514925373134329,
      "grad_norm": 0.451742559671402,
      "learning_rate": 1.430092034174979e-06,
      "loss": 0.6133,
      "step": 1275
    },
    {
      "epoch": 9.552238805970148,
      "grad_norm": 0.4507807493209839,
      "learning_rate": 1.2189708940490652e-06,
      "loss": 0.604,
      "step": 1280
    },
    {
      "epoch": 9.58955223880597,
      "grad_norm": 0.45165741443634033,
      "learning_rate": 1.0246073819125345e-06,
      "loss": 0.604,
      "step": 1285
    },
    {
      "epoch": 9.626865671641792,
      "grad_norm": 0.45481327176094055,
      "learning_rate": 8.470344704066046e-07,
      "loss": 0.6077,
      "step": 1290
    },
    {
      "epoch": 9.664179104477611,
      "grad_norm": 0.4481595456600189,
      "learning_rate": 6.862822837445881e-07,
      "loss": 0.6021,
      "step": 1295
    },
    {
      "epoch": 9.701492537313433,
      "grad_norm": 0.4543148875236511,
      "learning_rate": 5.42378092601481e-07,
      "loss": 0.5979,
      "step": 1300
    },
    {
      "epoch": 9.738805970149254,
      "grad_norm": 0.4538787305355072,
      "learning_rate": 4.153463094877186e-07,
      "loss": 0.6013,
      "step": 1305
    },
    {
      "epoch": 9.776119402985074,
      "grad_norm": 0.4568292796611786,
      "learning_rate": 3.0520848460765527e-07,
      "loss": 0.6035,
      "step": 1310
    },
    {
      "epoch": 9.813432835820896,
      "grad_norm": 0.4435996115207672,
      "learning_rate": 2.119833022037221e-07,
      "loss": 0.6022,
      "step": 1315
    },
    {
      "epoch": 9.850746268656717,
      "grad_norm": 0.4439810514450073,
      "learning_rate": 1.3568657738678435e-07,
      "loss": 0.6025,
      "step": 1320
    },
    {
      "epoch": 9.888059701492537,
      "grad_norm": 0.4461255371570587,
      "learning_rate": 7.633125345317682e-08,
      "loss": 0.5931,
      "step": 1325
    },
    {
      "epoch": 9.925373134328359,
      "grad_norm": 0.4546414315700531,
      "learning_rate": 3.392739968894887e-08,
      "loss": 0.6182,
      "step": 1330
    },
    {
      "epoch": 9.962686567164178,
      "grad_norm": 0.45576563477516174,
      "learning_rate": 8.48220966164215e-09,
      "loss": 0.6126,
      "step": 1335
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.4497128427028656,
      "learning_rate": 0.0,
      "loss": 0.6028,
      "step": 1340
    },
    {
      "epoch": 10.0,
      "eval_loss": 1.542757511138916,
      "eval_runtime": 0.5408,
      "eval_samples_per_second": 3.698,
      "eval_steps_per_second": 1.849,
      "step": 1340
    },
    {
      "epoch": 10.0,
      "step": 1340,
      "total_flos": 3.755518175798624e+18,
      "train_loss": 0.7269157813556159,
      "train_runtime": 7393.4759,
      "train_samples_per_second": 11.586,
      "train_steps_per_second": 0.181
    }
  ],
  "logging_steps": 5,
  "max_steps": 1340,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.755518175798624e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|
|