MedicalLM / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.988236782190741,
"eval_steps": 500,
"global_step": 94500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.02,
"learning_rate": 1.9894594822497684e-05,
"loss": 3.6148,
"step": 500
},
{
"epoch": 0.03,
"learning_rate": 1.9789189644995362e-05,
"loss": 3.3107,
"step": 1000
},
{
"epoch": 0.05,
"learning_rate": 1.9683784467493044e-05,
"loss": 3.2487,
"step": 1500
},
{
"epoch": 0.06,
"learning_rate": 1.9578379289990726e-05,
"loss": 3.1954,
"step": 2000
},
{
"epoch": 0.08,
"learning_rate": 1.947297411248841e-05,
"loss": 3.1585,
"step": 2500
},
{
"epoch": 0.09,
"learning_rate": 1.9367568934986087e-05,
"loss": 3.1312,
"step": 3000
},
{
"epoch": 0.11,
"learning_rate": 1.926216375748377e-05,
"loss": 3.0709,
"step": 3500
},
{
"epoch": 0.13,
"learning_rate": 1.9156758579981448e-05,
"loss": 3.0622,
"step": 4000
},
{
"epoch": 0.14,
"learning_rate": 1.905135340247913e-05,
"loss": 3.0556,
"step": 4500
},
{
"epoch": 0.16,
"learning_rate": 1.8945948224976815e-05,
"loss": 3.002,
"step": 5000
},
{
"epoch": 0.17,
"learning_rate": 1.8840543047474494e-05,
"loss": 3.0279,
"step": 5500
},
{
"epoch": 0.19,
"learning_rate": 1.8735137869972176e-05,
"loss": 2.9804,
"step": 6000
},
{
"epoch": 0.21,
"learning_rate": 1.8629732692469854e-05,
"loss": 2.988,
"step": 6500
},
{
"epoch": 0.22,
"learning_rate": 1.8524327514967536e-05,
"loss": 2.9664,
"step": 7000
},
{
"epoch": 0.24,
"learning_rate": 1.841892233746522e-05,
"loss": 2.9649,
"step": 7500
},
{
"epoch": 0.25,
"learning_rate": 1.83135171599629e-05,
"loss": 2.9827,
"step": 8000
},
{
"epoch": 0.27,
"learning_rate": 1.820811198246058e-05,
"loss": 2.9541,
"step": 8500
},
{
"epoch": 0.28,
"learning_rate": 1.810270680495826e-05,
"loss": 2.955,
"step": 9000
},
{
"epoch": 0.3,
"learning_rate": 1.7997301627455943e-05,
"loss": 2.9489,
"step": 9500
},
{
"epoch": 0.32,
"learning_rate": 1.7891896449953622e-05,
"loss": 2.9415,
"step": 10000
},
{
"epoch": 0.33,
"learning_rate": 1.7786491272451304e-05,
"loss": 2.9453,
"step": 10500
},
{
"epoch": 0.35,
"learning_rate": 1.7681086094948986e-05,
"loss": 2.9273,
"step": 11000
},
{
"epoch": 0.36,
"learning_rate": 1.7575680917446668e-05,
"loss": 2.935,
"step": 11500
},
{
"epoch": 0.38,
"learning_rate": 1.7470275739944346e-05,
"loss": 2.938,
"step": 12000
},
{
"epoch": 0.4,
"learning_rate": 1.736487056244203e-05,
"loss": 2.9441,
"step": 12500
},
{
"epoch": 0.41,
"learning_rate": 1.7259465384939707e-05,
"loss": 2.9091,
"step": 13000
},
{
"epoch": 0.43,
"learning_rate": 1.715406020743739e-05,
"loss": 2.9037,
"step": 13500
},
{
"epoch": 0.44,
"learning_rate": 1.704865502993507e-05,
"loss": 2.9112,
"step": 14000
},
{
"epoch": 0.46,
"learning_rate": 1.6943249852432753e-05,
"loss": 2.88,
"step": 14500
},
{
"epoch": 0.47,
"learning_rate": 1.6837844674930435e-05,
"loss": 2.9045,
"step": 15000
},
{
"epoch": 0.49,
"learning_rate": 1.6732439497428114e-05,
"loss": 2.9221,
"step": 15500
},
{
"epoch": 0.51,
"learning_rate": 1.6627034319925796e-05,
"loss": 2.9079,
"step": 16000
},
{
"epoch": 0.52,
"learning_rate": 1.6521629142423478e-05,
"loss": 2.8972,
"step": 16500
},
{
"epoch": 0.54,
"learning_rate": 1.641622396492116e-05,
"loss": 2.8958,
"step": 17000
},
{
"epoch": 0.55,
"learning_rate": 1.631081878741884e-05,
"loss": 2.9097,
"step": 17500
},
{
"epoch": 0.57,
"learning_rate": 1.620541360991652e-05,
"loss": 2.9103,
"step": 18000
},
{
"epoch": 0.58,
"learning_rate": 1.6100008432414203e-05,
"loss": 2.9001,
"step": 18500
},
{
"epoch": 0.6,
"learning_rate": 1.599460325491188e-05,
"loss": 2.8897,
"step": 19000
},
{
"epoch": 0.62,
"learning_rate": 1.5889198077409563e-05,
"loss": 2.9011,
"step": 19500
},
{
"epoch": 0.63,
"learning_rate": 1.5783792899907245e-05,
"loss": 2.8966,
"step": 20000
},
{
"epoch": 0.65,
"learning_rate": 1.5678387722404927e-05,
"loss": 2.8786,
"step": 20500
},
{
"epoch": 0.66,
"learning_rate": 1.5572982544902606e-05,
"loss": 2.8719,
"step": 21000
},
{
"epoch": 0.68,
"learning_rate": 1.5467577367400288e-05,
"loss": 2.8824,
"step": 21500
},
{
"epoch": 0.7,
"learning_rate": 1.5362172189897967e-05,
"loss": 2.8859,
"step": 22000
},
{
"epoch": 0.71,
"learning_rate": 1.525676701239565e-05,
"loss": 2.8539,
"step": 22500
},
{
"epoch": 0.73,
"learning_rate": 1.5151361834893332e-05,
"loss": 2.8895,
"step": 23000
},
{
"epoch": 0.74,
"learning_rate": 1.5045956657391013e-05,
"loss": 2.869,
"step": 23500
},
{
"epoch": 0.76,
"learning_rate": 1.4940551479888695e-05,
"loss": 2.8888,
"step": 24000
},
{
"epoch": 0.77,
"learning_rate": 1.4835146302386373e-05,
"loss": 2.8637,
"step": 24500
},
{
"epoch": 0.79,
"learning_rate": 1.4729741124884055e-05,
"loss": 2.8905,
"step": 25000
},
{
"epoch": 0.81,
"learning_rate": 1.4624335947381736e-05,
"loss": 2.8916,
"step": 25500
},
{
"epoch": 0.82,
"learning_rate": 1.4518930769879418e-05,
"loss": 2.8801,
"step": 26000
},
{
"epoch": 0.84,
"learning_rate": 1.4413525592377098e-05,
"loss": 2.8854,
"step": 26500
},
{
"epoch": 0.85,
"learning_rate": 1.430812041487478e-05,
"loss": 2.8838,
"step": 27000
},
{
"epoch": 0.87,
"learning_rate": 1.4202715237372462e-05,
"loss": 2.8701,
"step": 27500
},
{
"epoch": 0.89,
"learning_rate": 1.4097310059870142e-05,
"loss": 2.8749,
"step": 28000
},
{
"epoch": 0.9,
"learning_rate": 1.3991904882367824e-05,
"loss": 2.8488,
"step": 28500
},
{
"epoch": 0.92,
"learning_rate": 1.3886499704865503e-05,
"loss": 2.8685,
"step": 29000
},
{
"epoch": 0.93,
"learning_rate": 1.3781094527363185e-05,
"loss": 2.8526,
"step": 29500
},
{
"epoch": 0.95,
"learning_rate": 1.3675689349860865e-05,
"loss": 2.8521,
"step": 30000
},
{
"epoch": 0.96,
"learning_rate": 1.3570284172358547e-05,
"loss": 2.8803,
"step": 30500
},
{
"epoch": 0.98,
"learning_rate": 1.3464878994856228e-05,
"loss": 2.8803,
"step": 31000
},
{
"epoch": 1.0,
"learning_rate": 1.335947381735391e-05,
"loss": 2.8807,
"step": 31500
},
{
"epoch": 1.0,
"eval_loss": 2.767564058303833,
"eval_runtime": 6452.3126,
"eval_samples_per_second": 39.209,
"eval_steps_per_second": 4.901,
"step": 31624
},
{
"epoch": 1.01,
"learning_rate": 1.3254068639851592e-05,
"loss": 2.8679,
"step": 32000
},
{
"epoch": 1.03,
"learning_rate": 1.3148663462349272e-05,
"loss": 2.874,
"step": 32500
},
{
"epoch": 1.04,
"learning_rate": 1.3043258284846954e-05,
"loss": 2.8517,
"step": 33000
},
{
"epoch": 1.06,
"learning_rate": 1.2937853107344633e-05,
"loss": 2.8499,
"step": 33500
},
{
"epoch": 1.08,
"learning_rate": 1.2832447929842315e-05,
"loss": 2.8693,
"step": 34000
},
{
"epoch": 1.09,
"learning_rate": 1.2727042752339995e-05,
"loss": 2.8738,
"step": 34500
},
{
"epoch": 1.11,
"learning_rate": 1.2621637574837677e-05,
"loss": 2.8282,
"step": 35000
},
{
"epoch": 1.12,
"learning_rate": 1.2516232397335358e-05,
"loss": 2.8402,
"step": 35500
},
{
"epoch": 1.14,
"learning_rate": 1.241082721983304e-05,
"loss": 2.8686,
"step": 36000
},
{
"epoch": 1.15,
"learning_rate": 1.2305422042330722e-05,
"loss": 2.8629,
"step": 36500
},
{
"epoch": 1.17,
"learning_rate": 1.2200016864828402e-05,
"loss": 2.8643,
"step": 37000
},
{
"epoch": 1.19,
"learning_rate": 1.2094611687326084e-05,
"loss": 2.8254,
"step": 37500
},
{
"epoch": 1.2,
"learning_rate": 1.1989206509823763e-05,
"loss": 2.8569,
"step": 38000
},
{
"epoch": 1.22,
"learning_rate": 1.1883801332321445e-05,
"loss": 2.8581,
"step": 38500
},
{
"epoch": 1.23,
"learning_rate": 1.1778396154819125e-05,
"loss": 2.8454,
"step": 39000
},
{
"epoch": 1.25,
"learning_rate": 1.1672990977316807e-05,
"loss": 2.8462,
"step": 39500
},
{
"epoch": 1.26,
"learning_rate": 1.1567585799814487e-05,
"loss": 2.865,
"step": 40000
},
{
"epoch": 1.28,
"learning_rate": 1.146218062231217e-05,
"loss": 2.8448,
"step": 40500
},
{
"epoch": 1.3,
"learning_rate": 1.1356775444809851e-05,
"loss": 2.8366,
"step": 41000
},
{
"epoch": 1.31,
"learning_rate": 1.1251370267307532e-05,
"loss": 2.8472,
"step": 41500
},
{
"epoch": 1.33,
"learning_rate": 1.1145965089805214e-05,
"loss": 2.8406,
"step": 42000
},
{
"epoch": 1.34,
"learning_rate": 1.1040559912302892e-05,
"loss": 2.8612,
"step": 42500
},
{
"epoch": 1.36,
"learning_rate": 1.0935154734800574e-05,
"loss": 2.8621,
"step": 43000
},
{
"epoch": 1.38,
"learning_rate": 1.0829749557298255e-05,
"loss": 2.8466,
"step": 43500
},
{
"epoch": 1.39,
"learning_rate": 1.0724344379795937e-05,
"loss": 2.8412,
"step": 44000
},
{
"epoch": 1.41,
"learning_rate": 1.0618939202293617e-05,
"loss": 2.8483,
"step": 44500
},
{
"epoch": 1.42,
"learning_rate": 1.0513534024791299e-05,
"loss": 2.8502,
"step": 45000
},
{
"epoch": 1.44,
"learning_rate": 1.0408128847288981e-05,
"loss": 2.8451,
"step": 45500
},
{
"epoch": 1.45,
"learning_rate": 1.0302723669786661e-05,
"loss": 2.8406,
"step": 46000
},
{
"epoch": 1.47,
"learning_rate": 1.0197318492284343e-05,
"loss": 2.8499,
"step": 46500
},
{
"epoch": 1.49,
"learning_rate": 1.0091913314782022e-05,
"loss": 2.8533,
"step": 47000
},
{
"epoch": 1.5,
"learning_rate": 9.986508137279704e-06,
"loss": 2.8233,
"step": 47500
},
{
"epoch": 1.52,
"learning_rate": 9.881102959777386e-06,
"loss": 2.8373,
"step": 48000
},
{
"epoch": 1.53,
"learning_rate": 9.775697782275066e-06,
"loss": 2.8606,
"step": 48500
},
{
"epoch": 1.55,
"learning_rate": 9.670292604772747e-06,
"loss": 2.8385,
"step": 49000
},
{
"epoch": 1.57,
"learning_rate": 9.564887427270429e-06,
"loss": 2.8455,
"step": 49500
},
{
"epoch": 1.58,
"learning_rate": 9.459482249768109e-06,
"loss": 2.8701,
"step": 50000
},
{
"epoch": 1.6,
"learning_rate": 9.354077072265791e-06,
"loss": 2.8277,
"step": 50500
},
{
"epoch": 1.61,
"learning_rate": 9.248671894763471e-06,
"loss": 2.8681,
"step": 51000
},
{
"epoch": 1.63,
"learning_rate": 9.143266717261152e-06,
"loss": 2.8445,
"step": 51500
},
{
"epoch": 1.64,
"learning_rate": 9.037861539758834e-06,
"loss": 2.8545,
"step": 52000
},
{
"epoch": 1.66,
"learning_rate": 8.932456362256516e-06,
"loss": 2.8397,
"step": 52500
},
{
"epoch": 1.68,
"learning_rate": 8.827051184754196e-06,
"loss": 2.8442,
"step": 53000
},
{
"epoch": 1.69,
"learning_rate": 8.721646007251876e-06,
"loss": 2.8288,
"step": 53500
},
{
"epoch": 1.71,
"learning_rate": 8.616240829749558e-06,
"loss": 2.8278,
"step": 54000
},
{
"epoch": 1.72,
"learning_rate": 8.510835652247239e-06,
"loss": 2.835,
"step": 54500
},
{
"epoch": 1.74,
"learning_rate": 8.40543047474492e-06,
"loss": 2.8464,
"step": 55000
},
{
"epoch": 1.75,
"learning_rate": 8.300025297242601e-06,
"loss": 2.8251,
"step": 55500
},
{
"epoch": 1.77,
"learning_rate": 8.194620119740281e-06,
"loss": 2.8414,
"step": 56000
},
{
"epoch": 1.79,
"learning_rate": 8.089214942237964e-06,
"loss": 2.8335,
"step": 56500
},
{
"epoch": 1.8,
"learning_rate": 7.983809764735646e-06,
"loss": 2.8405,
"step": 57000
},
{
"epoch": 1.82,
"learning_rate": 7.878404587233326e-06,
"loss": 2.835,
"step": 57500
},
{
"epoch": 1.83,
"learning_rate": 7.772999409731006e-06,
"loss": 2.8239,
"step": 58000
},
{
"epoch": 1.85,
"learning_rate": 7.667594232228688e-06,
"loss": 2.8322,
"step": 58500
},
{
"epoch": 1.87,
"learning_rate": 7.5621890547263685e-06,
"loss": 2.8366,
"step": 59000
},
{
"epoch": 1.88,
"learning_rate": 7.45678387722405e-06,
"loss": 2.8391,
"step": 59500
},
{
"epoch": 1.9,
"learning_rate": 7.351378699721731e-06,
"loss": 2.8396,
"step": 60000
},
{
"epoch": 1.91,
"learning_rate": 7.245973522219411e-06,
"loss": 2.824,
"step": 60500
},
{
"epoch": 1.93,
"learning_rate": 7.140568344717092e-06,
"loss": 2.8282,
"step": 61000
},
{
"epoch": 1.94,
"learning_rate": 7.035163167214774e-06,
"loss": 2.8341,
"step": 61500
},
{
"epoch": 1.96,
"learning_rate": 6.929757989712456e-06,
"loss": 2.8247,
"step": 62000
},
{
"epoch": 1.98,
"learning_rate": 6.824352812210137e-06,
"loss": 2.837,
"step": 62500
},
{
"epoch": 1.99,
"learning_rate": 6.718947634707817e-06,
"loss": 2.8199,
"step": 63000
},
{
"epoch": 2.0,
"eval_loss": 2.7402868270874023,
"eval_runtime": 6430.6551,
"eval_samples_per_second": 39.341,
"eval_steps_per_second": 4.918,
"step": 63248
},
{
"epoch": 2.01,
"learning_rate": 6.613542457205498e-06,
"loss": 2.8177,
"step": 63500
},
{
"epoch": 2.02,
"learning_rate": 6.5081372797031795e-06,
"loss": 2.8361,
"step": 64000
},
{
"epoch": 2.04,
"learning_rate": 6.402732102200861e-06,
"loss": 2.8109,
"step": 64500
},
{
"epoch": 2.06,
"learning_rate": 6.297326924698541e-06,
"loss": 2.8399,
"step": 65000
},
{
"epoch": 2.07,
"learning_rate": 6.191921747196222e-06,
"loss": 2.8311,
"step": 65500
},
{
"epoch": 2.09,
"learning_rate": 6.086516569693904e-06,
"loss": 2.8227,
"step": 66000
},
{
"epoch": 2.1,
"learning_rate": 5.981111392191585e-06,
"loss": 2.8347,
"step": 66500
},
{
"epoch": 2.12,
"learning_rate": 5.8757062146892665e-06,
"loss": 2.8259,
"step": 67000
},
{
"epoch": 2.13,
"learning_rate": 5.770301037186947e-06,
"loss": 2.8554,
"step": 67500
},
{
"epoch": 2.15,
"learning_rate": 5.664895859684628e-06,
"loss": 2.8385,
"step": 68000
},
{
"epoch": 2.17,
"learning_rate": 5.559490682182309e-06,
"loss": 2.8042,
"step": 68500
},
{
"epoch": 2.18,
"learning_rate": 5.45408550467999e-06,
"loss": 2.8222,
"step": 69000
},
{
"epoch": 2.2,
"learning_rate": 5.348680327177671e-06,
"loss": 2.8209,
"step": 69500
},
{
"epoch": 2.21,
"learning_rate": 5.243275149675352e-06,
"loss": 2.836,
"step": 70000
},
{
"epoch": 2.23,
"learning_rate": 5.137869972173034e-06,
"loss": 2.8291,
"step": 70500
},
{
"epoch": 2.25,
"learning_rate": 5.032464794670715e-06,
"loss": 2.8257,
"step": 71000
},
{
"epoch": 2.26,
"learning_rate": 4.927059617168396e-06,
"loss": 2.8353,
"step": 71500
},
{
"epoch": 2.28,
"learning_rate": 4.8216544396660766e-06,
"loss": 2.8184,
"step": 72000
},
{
"epoch": 2.29,
"learning_rate": 4.716249262163758e-06,
"loss": 2.8308,
"step": 72500
},
{
"epoch": 2.31,
"learning_rate": 4.610844084661439e-06,
"loss": 2.8321,
"step": 73000
},
{
"epoch": 2.32,
"learning_rate": 4.50543890715912e-06,
"loss": 2.8366,
"step": 73500
},
{
"epoch": 2.34,
"learning_rate": 4.400033729656801e-06,
"loss": 2.8133,
"step": 74000
},
{
"epoch": 2.36,
"learning_rate": 4.2946285521544825e-06,
"loss": 2.8063,
"step": 74500
},
{
"epoch": 2.37,
"learning_rate": 4.189223374652164e-06,
"loss": 2.8334,
"step": 75000
},
{
"epoch": 2.39,
"learning_rate": 4.083818197149844e-06,
"loss": 2.8103,
"step": 75500
},
{
"epoch": 2.4,
"learning_rate": 3.978413019647526e-06,
"loss": 2.8221,
"step": 76000
},
{
"epoch": 2.42,
"learning_rate": 3.873007842145206e-06,
"loss": 2.817,
"step": 76500
},
{
"epoch": 2.43,
"learning_rate": 3.7676026646428875e-06,
"loss": 2.8334,
"step": 77000
},
{
"epoch": 2.45,
"learning_rate": 3.6621974871405687e-06,
"loss": 2.8423,
"step": 77500
},
{
"epoch": 2.47,
"learning_rate": 3.5567923096382494e-06,
"loss": 2.8247,
"step": 78000
},
{
"epoch": 2.48,
"learning_rate": 3.451387132135931e-06,
"loss": 2.8373,
"step": 78500
},
{
"epoch": 2.5,
"learning_rate": 3.3459819546336118e-06,
"loss": 2.8324,
"step": 79000
},
{
"epoch": 2.51,
"learning_rate": 3.240576777131293e-06,
"loss": 2.8312,
"step": 79500
},
{
"epoch": 2.53,
"learning_rate": 3.1351715996289737e-06,
"loss": 2.8452,
"step": 80000
},
{
"epoch": 2.55,
"learning_rate": 3.029766422126655e-06,
"loss": 2.8331,
"step": 80500
},
{
"epoch": 2.56,
"learning_rate": 2.9243612446243365e-06,
"loss": 2.8209,
"step": 81000
},
{
"epoch": 2.58,
"learning_rate": 2.8189560671220172e-06,
"loss": 2.8289,
"step": 81500
},
{
"epoch": 2.59,
"learning_rate": 2.7135508896196984e-06,
"loss": 2.8415,
"step": 82000
},
{
"epoch": 2.61,
"learning_rate": 2.608145712117379e-06,
"loss": 2.8256,
"step": 82500
},
{
"epoch": 2.62,
"learning_rate": 2.5027405346150608e-06,
"loss": 2.8257,
"step": 83000
},
{
"epoch": 2.64,
"learning_rate": 2.3973353571127415e-06,
"loss": 2.833,
"step": 83500
},
{
"epoch": 2.66,
"learning_rate": 2.2919301796104227e-06,
"loss": 2.8348,
"step": 84000
},
{
"epoch": 2.67,
"learning_rate": 2.186525002108104e-06,
"loss": 2.8292,
"step": 84500
},
{
"epoch": 2.69,
"learning_rate": 2.081119824605785e-06,
"loss": 2.8259,
"step": 85000
},
{
"epoch": 2.7,
"learning_rate": 1.9757146471034658e-06,
"loss": 2.8319,
"step": 85500
},
{
"epoch": 2.72,
"learning_rate": 1.870309469601147e-06,
"loss": 2.8094,
"step": 86000
},
{
"epoch": 2.74,
"learning_rate": 1.764904292098828e-06,
"loss": 2.8276,
"step": 86500
},
{
"epoch": 2.75,
"learning_rate": 1.659499114596509e-06,
"loss": 2.8325,
"step": 87000
},
{
"epoch": 2.77,
"learning_rate": 1.55409393709419e-06,
"loss": 2.8247,
"step": 87500
},
{
"epoch": 2.78,
"learning_rate": 1.4486887595918715e-06,
"loss": 2.8363,
"step": 88000
},
{
"epoch": 2.8,
"learning_rate": 1.3432835820895524e-06,
"loss": 2.8343,
"step": 88500
},
{
"epoch": 2.81,
"learning_rate": 1.2378784045872334e-06,
"loss": 2.8325,
"step": 89000
},
{
"epoch": 2.83,
"learning_rate": 1.1324732270849146e-06,
"loss": 2.8316,
"step": 89500
},
{
"epoch": 2.85,
"learning_rate": 1.0270680495825955e-06,
"loss": 2.8232,
"step": 90000
},
{
"epoch": 2.86,
"learning_rate": 9.216628720802767e-07,
"loss": 2.8241,
"step": 90500
},
{
"epoch": 2.88,
"learning_rate": 8.162576945779577e-07,
"loss": 2.8203,
"step": 91000
},
{
"epoch": 2.89,
"learning_rate": 7.108525170756387e-07,
"loss": 2.8356,
"step": 91500
},
{
"epoch": 2.91,
"learning_rate": 6.054473395733199e-07,
"loss": 2.8509,
"step": 92000
},
{
"epoch": 2.92,
"learning_rate": 5.00042162071001e-07,
"loss": 2.8315,
"step": 92500
},
{
"epoch": 2.94,
"learning_rate": 3.9463698456868205e-07,
"loss": 2.8223,
"step": 93000
},
{
"epoch": 2.96,
"learning_rate": 2.892318070663631e-07,
"loss": 2.8368,
"step": 93500
},
{
"epoch": 2.97,
"learning_rate": 1.838266295640442e-07,
"loss": 2.8271,
"step": 94000
},
{
"epoch": 2.99,
"learning_rate": 7.842145206172527e-08,
"loss": 2.8375,
"step": 94500
}
],
"logging_steps": 500,
"max_steps": 94872,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 7.025524751644754e+17,
"trial_name": null,
"trial_params": null
}