{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.0,
"eval_steps": 500,
"global_step": 40035,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.062445360309728984,
"grad_norm": 14.40565299987793,
"learning_rate": 2.9625327838141625e-05,
"loss": 1.9858,
"step": 500
},
{
"epoch": 0.12489072061945797,
"grad_norm": 19.097322463989258,
"learning_rate": 2.9250655676283253e-05,
"loss": 1.5149,
"step": 1000
},
{
"epoch": 0.18733608092918697,
"grad_norm": 15.344030380249023,
"learning_rate": 2.887598351442488e-05,
"loss": 1.4258,
"step": 1500
},
{
"epoch": 0.24978144123891594,
"grad_norm": 22.120119094848633,
"learning_rate": 2.8501311352566505e-05,
"loss": 1.3884,
"step": 2000
},
{
"epoch": 0.31222680154864496,
"grad_norm": 15.072545051574707,
"learning_rate": 2.812663919070813e-05,
"loss": 1.3306,
"step": 2500
},
{
"epoch": 0.37467216185837393,
"grad_norm": 15.24660587310791,
"learning_rate": 2.7751967028849757e-05,
"loss": 1.2959,
"step": 3000
},
{
"epoch": 0.4371175221681029,
"grad_norm": 13.45875072479248,
"learning_rate": 2.7377294866991385e-05,
"loss": 1.2756,
"step": 3500
},
{
"epoch": 0.49956288247783187,
"grad_norm": 12.797966003417969,
"learning_rate": 2.700262270513301e-05,
"loss": 1.2815,
"step": 4000
},
{
"epoch": 0.5620082427875609,
"grad_norm": 21.160541534423828,
"learning_rate": 2.6627950543274633e-05,
"loss": 1.2025,
"step": 4500
},
{
"epoch": 0.6244536030972899,
"grad_norm": 13.691265106201172,
"learning_rate": 2.625327838141626e-05,
"loss": 1.1658,
"step": 5000
},
{
"epoch": 0.6868989634070188,
"grad_norm": 21.028295516967773,
"learning_rate": 2.587860621955789e-05,
"loss": 1.1849,
"step": 5500
},
{
"epoch": 0.7493443237167479,
"grad_norm": 15.639451026916504,
"learning_rate": 2.5503934057699513e-05,
"loss": 1.154,
"step": 6000
},
{
"epoch": 0.8117896840264769,
"grad_norm": 15.77192497253418,
"learning_rate": 2.5129261895841137e-05,
"loss": 1.1657,
"step": 6500
},
{
"epoch": 0.8742350443362058,
"grad_norm": 19.546422958374023,
"learning_rate": 2.4754589733982765e-05,
"loss": 1.1329,
"step": 7000
},
{
"epoch": 0.9366804046459348,
"grad_norm": 11.490877151489258,
"learning_rate": 2.4379917572124393e-05,
"loss": 1.1304,
"step": 7500
},
{
"epoch": 0.9991257649556637,
"grad_norm": 13.906126022338867,
"learning_rate": 2.4005245410266017e-05,
"loss": 1.0877,
"step": 8000
},
{
"epoch": 1.0615711252653928,
"grad_norm": 22.66297721862793,
"learning_rate": 2.3630573248407645e-05,
"loss": 0.8364,
"step": 8500
},
{
"epoch": 1.1240164855751218,
"grad_norm": 21.926162719726562,
"learning_rate": 2.325590108654927e-05,
"loss": 0.8493,
"step": 9000
},
{
"epoch": 1.1864618458848508,
"grad_norm": 21.4632625579834,
"learning_rate": 2.2881228924690897e-05,
"loss": 0.8424,
"step": 9500
},
{
"epoch": 1.2489072061945796,
"grad_norm": 19.286104202270508,
"learning_rate": 2.250655676283252e-05,
"loss": 0.856,
"step": 10000
},
{
"epoch": 1.3113525665043086,
"grad_norm": 35.93063735961914,
"learning_rate": 2.213188460097415e-05,
"loss": 0.8289,
"step": 10500
},
{
"epoch": 1.3737979268140377,
"grad_norm": 24.366748809814453,
"learning_rate": 2.1757212439115773e-05,
"loss": 0.868,
"step": 11000
},
{
"epoch": 1.4362432871237667,
"grad_norm": 22.02488136291504,
"learning_rate": 2.13825402772574e-05,
"loss": 0.841,
"step": 11500
},
{
"epoch": 1.4986886474334957,
"grad_norm": 54.16468811035156,
"learning_rate": 2.1007868115399025e-05,
"loss": 0.8262,
"step": 12000
},
{
"epoch": 1.5611340077432247,
"grad_norm": 27.68937873840332,
"learning_rate": 2.0633195953540653e-05,
"loss": 0.8225,
"step": 12500
},
{
"epoch": 1.6235793680529538,
"grad_norm": 19.883024215698242,
"learning_rate": 2.0258523791682277e-05,
"loss": 0.8412,
"step": 13000
},
{
"epoch": 1.6860247283626828,
"grad_norm": 25.48287010192871,
"learning_rate": 1.9883851629823905e-05,
"loss": 0.8559,
"step": 13500
},
{
"epoch": 1.7484700886724116,
"grad_norm": 25.736173629760742,
"learning_rate": 1.9509179467965533e-05,
"loss": 0.798,
"step": 14000
},
{
"epoch": 1.8109154489821406,
"grad_norm": 15.345009803771973,
"learning_rate": 1.9134507306107157e-05,
"loss": 0.8568,
"step": 14500
},
{
"epoch": 1.8733608092918697,
"grad_norm": 13.86083698272705,
"learning_rate": 1.875983514424878e-05,
"loss": 0.8144,
"step": 15000
},
{
"epoch": 1.9358061696015985,
"grad_norm": 21.71160316467285,
"learning_rate": 1.838516298239041e-05,
"loss": 0.8484,
"step": 15500
},
{
"epoch": 1.9982515299113275,
"grad_norm": 13.77094841003418,
"learning_rate": 1.8010490820532037e-05,
"loss": 0.8502,
"step": 16000
},
{
"epoch": 2.0606968902210565,
"grad_norm": 21.91532325744629,
"learning_rate": 1.763581865867366e-05,
"loss": 0.5608,
"step": 16500
},
{
"epoch": 2.1231422505307855,
"grad_norm": 19.61579704284668,
"learning_rate": 1.7261146496815285e-05,
"loss": 0.5345,
"step": 17000
},
{
"epoch": 2.1855876108405146,
"grad_norm": 17.571279525756836,
"learning_rate": 1.6886474334956913e-05,
"loss": 0.5251,
"step": 17500
},
{
"epoch": 2.2480329711502436,
"grad_norm": 45.33535385131836,
"learning_rate": 1.651180217309854e-05,
"loss": 0.565,
"step": 18000
},
{
"epoch": 2.3104783314599726,
"grad_norm": 15.158855438232422,
"learning_rate": 1.6137130011240165e-05,
"loss": 0.5472,
"step": 18500
},
{
"epoch": 2.3729236917697016,
"grad_norm": 17.102876663208008,
"learning_rate": 1.576245784938179e-05,
"loss": 0.5638,
"step": 19000
},
{
"epoch": 2.4353690520794307,
"grad_norm": 34.022037506103516,
"learning_rate": 1.5387785687523417e-05,
"loss": 0.5259,
"step": 19500
},
{
"epoch": 2.4978144123891592,
"grad_norm": 20.363927841186523,
"learning_rate": 1.5013113525665045e-05,
"loss": 0.5672,
"step": 20000
},
{
"epoch": 2.5602597726988883,
"grad_norm": 26.551931381225586,
"learning_rate": 1.4638441363806669e-05,
"loss": 0.5642,
"step": 20500
},
{
"epoch": 2.6227051330086173,
"grad_norm": 37.67859649658203,
"learning_rate": 1.4263769201948297e-05,
"loss": 0.5836,
"step": 21000
},
{
"epoch": 2.6851504933183463,
"grad_norm": 23.416738510131836,
"learning_rate": 1.3889097040089921e-05,
"loss": 0.5932,
"step": 21500
},
{
"epoch": 2.7475958536280753,
"grad_norm": 37.503047943115234,
"learning_rate": 1.3514424878231549e-05,
"loss": 0.5601,
"step": 22000
},
{
"epoch": 2.8100412139378044,
"grad_norm": 26.307445526123047,
"learning_rate": 1.3139752716373173e-05,
"loss": 0.5784,
"step": 22500
},
{
"epoch": 2.8724865742475334,
"grad_norm": 15.568779945373535,
"learning_rate": 1.27650805545148e-05,
"loss": 0.5613,
"step": 23000
},
{
"epoch": 2.9349319345572624,
"grad_norm": 19.449880599975586,
"learning_rate": 1.2390408392656425e-05,
"loss": 0.5597,
"step": 23500
},
{
"epoch": 2.9973772948669914,
"grad_norm": 22.43405532836914,
"learning_rate": 1.2015736230798053e-05,
"loss": 0.5637,
"step": 24000
},
{
"epoch": 3.0598226551767205,
"grad_norm": 45.42557907104492,
"learning_rate": 1.1641064068939677e-05,
"loss": 0.3605,
"step": 24500
},
{
"epoch": 3.1222680154864495,
"grad_norm": 51.729000091552734,
"learning_rate": 1.1266391907081305e-05,
"loss": 0.3556,
"step": 25000
},
{
"epoch": 3.1847133757961785,
"grad_norm": 11.632122993469238,
"learning_rate": 1.0891719745222929e-05,
"loss": 0.3751,
"step": 25500
},
{
"epoch": 3.2471587361059076,
"grad_norm": 6.7493743896484375,
"learning_rate": 1.0517047583364557e-05,
"loss": 0.3554,
"step": 26000
},
{
"epoch": 3.309604096415636,
"grad_norm": 9.528502464294434,
"learning_rate": 1.0142375421506183e-05,
"loss": 0.3463,
"step": 26500
},
{
"epoch": 3.372049456725365,
"grad_norm": 28.454116821289062,
"learning_rate": 9.767703259647809e-06,
"loss": 0.3823,
"step": 27000
},
{
"epoch": 3.434494817035094,
"grad_norm": 28.30170440673828,
"learning_rate": 9.393031097789435e-06,
"loss": 0.3529,
"step": 27500
},
{
"epoch": 3.496940177344823,
"grad_norm": 14.024445533752441,
"learning_rate": 9.018358935931061e-06,
"loss": 0.3774,
"step": 28000
},
{
"epoch": 3.5593855376545522,
"grad_norm": 9.80344295501709,
"learning_rate": 8.643686774072687e-06,
"loss": 0.3471,
"step": 28500
},
{
"epoch": 3.6218308979642813,
"grad_norm": 18.51318359375,
"learning_rate": 8.269014612214313e-06,
"loss": 0.3514,
"step": 29000
},
{
"epoch": 3.6842762582740103,
"grad_norm": 23.305683135986328,
"learning_rate": 7.894342450355939e-06,
"loss": 0.3539,
"step": 29500
},
{
"epoch": 3.7467216185837393,
"grad_norm": 31.820262908935547,
"learning_rate": 7.519670288497564e-06,
"loss": 0.3364,
"step": 30000
},
{
"epoch": 3.8091669788934683,
"grad_norm": 28.33430290222168,
"learning_rate": 7.144998126639191e-06,
"loss": 0.3607,
"step": 30500
},
{
"epoch": 3.8716123392031974,
"grad_norm": 30.772987365722656,
"learning_rate": 6.770325964780817e-06,
"loss": 0.3314,
"step": 31000
},
{
"epoch": 3.934057699512926,
"grad_norm": 16.197298049926758,
"learning_rate": 6.395653802922443e-06,
"loss": 0.3449,
"step": 31500
},
{
"epoch": 3.996503059822655,
"grad_norm": 46.485103607177734,
"learning_rate": 6.020981641064069e-06,
"loss": 0.3263,
"step": 32000
},
{
"epoch": 4.058948420132384,
"grad_norm": 51.467411041259766,
"learning_rate": 5.646309479205695e-06,
"loss": 0.2165,
"step": 32500
},
{
"epoch": 4.121393780442113,
"grad_norm": 3.3639559745788574,
"learning_rate": 5.271637317347321e-06,
"loss": 0.2134,
"step": 33000
},
{
"epoch": 4.183839140751842,
"grad_norm": 20.382158279418945,
"learning_rate": 4.896965155488947e-06,
"loss": 0.229,
"step": 33500
},
{
"epoch": 4.246284501061571,
"grad_norm": 22.384031295776367,
"learning_rate": 4.522292993630573e-06,
"loss": 0.2255,
"step": 34000
},
{
"epoch": 4.3087298613713,
"grad_norm": 2.972531318664551,
"learning_rate": 4.147620831772199e-06,
"loss": 0.2167,
"step": 34500
},
{
"epoch": 4.371175221681029,
"grad_norm": 19.604764938354492,
"learning_rate": 3.772948669913825e-06,
"loss": 0.2192,
"step": 35000
},
{
"epoch": 4.433620581990758,
"grad_norm": 16.23500633239746,
"learning_rate": 3.3982765080554515e-06,
"loss": 0.2242,
"step": 35500
},
{
"epoch": 4.496065942300487,
"grad_norm": 8.235413551330566,
"learning_rate": 3.0236043461970775e-06,
"loss": 0.2196,
"step": 36000
},
{
"epoch": 4.558511302610216,
"grad_norm": 22.458770751953125,
"learning_rate": 2.6489321843387035e-06,
"loss": 0.2244,
"step": 36500
},
{
"epoch": 4.620956662919945,
"grad_norm": 39.24953842163086,
"learning_rate": 2.2742600224803295e-06,
"loss": 0.2076,
"step": 37000
},
{
"epoch": 4.683402023229674,
"grad_norm": 10.973857879638672,
"learning_rate": 1.8995878606219555e-06,
"loss": 0.2288,
"step": 37500
},
{
"epoch": 4.745847383539403,
"grad_norm": 11.058865547180176,
"learning_rate": 1.5249156987635818e-06,
"loss": 0.214,
"step": 38000
},
{
"epoch": 4.808292743849132,
"grad_norm": 47.875057220458984,
"learning_rate": 1.150243536905208e-06,
"loss": 0.2237,
"step": 38500
},
{
"epoch": 4.870738104158861,
"grad_norm": 55.14733123779297,
"learning_rate": 7.755713750468341e-07,
"loss": 0.2228,
"step": 39000
},
{
"epoch": 4.93318346446859,
"grad_norm": 3.3354523181915283,
"learning_rate": 4.008992131884601e-07,
"loss": 0.2024,
"step": 39500
},
{
"epoch": 4.9956288247783185,
"grad_norm": 62.27578353881836,
"learning_rate": 2.6227051330086176e-08,
"loss": 0.2024,
"step": 40000
},
{
"epoch": 5.0,
"step": 40035,
"total_flos": 9.414718117765632e+16,
"train_loss": 0.0,
"train_runtime": 0.003,
"train_samples_per_second": 159489123.369,
"train_steps_per_second": 13291036.935
}
],
"logging_steps": 500,
"max_steps": 40035,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 9.414718117765632e+16,
"train_batch_size": 12,
"trial_name": null,
"trial_params": null
}