{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9991950630533941,
  "eval_steps": 500,
  "global_step": 931,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0010732492621411322,
      "grad_norm": 22.296275963984172,
      "learning_rate": 1.0638297872340426e-07,
      "loss": 1.3042,
      "step": 1
    },
    {
      "epoch": 0.005366246310705662,
      "grad_norm": 20.47367655843234,
      "learning_rate": 5.319148936170213e-07,
      "loss": 1.3018,
      "step": 5
    },
    {
      "epoch": 0.010732492621411323,
      "grad_norm": 8.599621080094153,
      "learning_rate": 1.0638297872340427e-06,
      "loss": 1.1903,
      "step": 10
    },
    {
      "epoch": 0.016098738932116986,
      "grad_norm": 8.578183948965831,
      "learning_rate": 1.595744680851064e-06,
      "loss": 1.0319,
      "step": 15
    },
    {
      "epoch": 0.021464985242822646,
      "grad_norm": 3.3186946669144883,
      "learning_rate": 2.1276595744680853e-06,
      "loss": 0.9077,
      "step": 20
    },
    {
      "epoch": 0.026831231553528307,
      "grad_norm": 2.4208456794965487,
      "learning_rate": 2.6595744680851065e-06,
      "loss": 0.8688,
      "step": 25
    },
    {
      "epoch": 0.03219747786423397,
      "grad_norm": 2.3084720872118303,
      "learning_rate": 3.191489361702128e-06,
      "loss": 0.8445,
      "step": 30
    },
    {
      "epoch": 0.03756372417493963,
      "grad_norm": 2.47946578003686,
      "learning_rate": 3.723404255319149e-06,
      "loss": 0.8252,
      "step": 35
    },
    {
      "epoch": 0.04292997048564529,
      "grad_norm": 2.3070118168825586,
      "learning_rate": 4.255319148936171e-06,
      "loss": 0.8045,
      "step": 40
    },
    {
      "epoch": 0.04829621679635095,
      "grad_norm": 2.347470725755473,
      "learning_rate": 4.787234042553192e-06,
      "loss": 0.784,
      "step": 45
    },
    {
      "epoch": 0.053662463107056614,
      "grad_norm": 2.399247076216005,
      "learning_rate": 5.319148936170213e-06,
      "loss": 0.782,
      "step": 50
    },
    {
      "epoch": 0.059028709417762275,
      "grad_norm": 2.3895963062252057,
      "learning_rate": 5.851063829787235e-06,
      "loss": 0.7509,
      "step": 55
    },
    {
      "epoch": 0.06439495572846794,
      "grad_norm": 2.3434507391593926,
      "learning_rate": 6.382978723404256e-06,
      "loss": 0.7477,
      "step": 60
    },
    {
      "epoch": 0.0697612020391736,
      "grad_norm": 2.23138891237801,
      "learning_rate": 6.914893617021278e-06,
      "loss": 0.7313,
      "step": 65
    },
    {
      "epoch": 0.07512744834987926,
      "grad_norm": 2.3748774018346874,
      "learning_rate": 7.446808510638298e-06,
      "loss": 0.7191,
      "step": 70
    },
    {
      "epoch": 0.08049369466058492,
      "grad_norm": 2.336896984412136,
      "learning_rate": 7.97872340425532e-06,
      "loss": 0.7153,
      "step": 75
    },
    {
      "epoch": 0.08585994097129059,
      "grad_norm": 2.169053338540135,
      "learning_rate": 8.510638297872341e-06,
      "loss": 0.7155,
      "step": 80
    },
    {
      "epoch": 0.09122618728199625,
      "grad_norm": 2.265230463284654,
      "learning_rate": 9.042553191489362e-06,
      "loss": 0.6957,
      "step": 85
    },
    {
      "epoch": 0.0965924335927019,
      "grad_norm": 2.3476311708110935,
      "learning_rate": 9.574468085106385e-06,
      "loss": 0.6938,
      "step": 90
    },
    {
      "epoch": 0.10195867990340757,
      "grad_norm": 2.284843693991532,
      "learning_rate": 9.999964780082996e-06,
      "loss": 0.6955,
      "step": 95
    },
    {
      "epoch": 0.10732492621411323,
      "grad_norm": 2.18921254485461,
      "learning_rate": 9.998732135085665e-06,
      "loss": 0.6834,
      "step": 100
    },
    {
      "epoch": 0.11269117252481889,
      "grad_norm": 2.1101783381393417,
      "learning_rate": 9.995738990383743e-06,
      "loss": 0.6806,
      "step": 105
    },
    {
      "epoch": 0.11805741883552455,
      "grad_norm": 2.370512013174141,
      "learning_rate": 9.990986400130607e-06,
      "loss": 0.6769,
      "step": 110
    },
    {
      "epoch": 0.12342366514623021,
      "grad_norm": 2.1335004655744396,
      "learning_rate": 9.984476038137437e-06,
      "loss": 0.686,
      "step": 115
    },
    {
      "epoch": 0.12878991145693588,
      "grad_norm": 2.1669988426080673,
      "learning_rate": 9.97621019728372e-06,
      "loss": 0.6861,
      "step": 120
    },
    {
      "epoch": 0.13415615776764153,
      "grad_norm": 2.382311101815907,
      "learning_rate": 9.966191788709716e-06,
      "loss": 0.667,
      "step": 125
    },
    {
      "epoch": 0.1395224040783472,
      "grad_norm": 2.0401210704281905,
      "learning_rate": 9.954424340791195e-06,
      "loss": 0.6661,
      "step": 130
    },
    {
      "epoch": 0.14488865038905285,
      "grad_norm": 1.997533760617226,
      "learning_rate": 9.940911997896774e-06,
      "loss": 0.6649,
      "step": 135
    },
    {
      "epoch": 0.15025489669975853,
      "grad_norm": 2.195901881231756,
      "learning_rate": 9.925659518928316e-06,
      "loss": 0.6616,
      "step": 140
    },
    {
      "epoch": 0.15562114301046417,
      "grad_norm": 2.0610833935176553,
      "learning_rate": 9.908672275644898e-06,
      "loss": 0.6618,
      "step": 145
    },
    {
      "epoch": 0.16098738932116985,
      "grad_norm": 1.9308969816226162,
      "learning_rate": 9.889956250770933e-06,
      "loss": 0.6573,
      "step": 150
    },
    {
      "epoch": 0.1663536356318755,
      "grad_norm": 2.2067283334504384,
      "learning_rate": 9.86951803588912e-06,
      "loss": 0.6505,
      "step": 155
    },
    {
      "epoch": 0.17171988194258117,
      "grad_norm": 2.163844105631547,
      "learning_rate": 9.847364829118963e-06,
      "loss": 0.6352,
      "step": 160
    },
    {
      "epoch": 0.17708612825328682,
      "grad_norm": 2.514522017128904,
      "learning_rate": 9.82350443258166e-06,
      "loss": 0.644,
      "step": 165
    },
    {
      "epoch": 0.1824523745639925,
      "grad_norm": 2.3092372737846016,
      "learning_rate": 9.797945249652295e-06,
      "loss": 0.6401,
      "step": 170
    },
    {
      "epoch": 0.18781862087469814,
      "grad_norm": 2.086913451295013,
      "learning_rate": 9.770696282000245e-06,
      "loss": 0.6317,
      "step": 175
    },
    {
      "epoch": 0.1931848671854038,
      "grad_norm": 4.263008656041996,
      "learning_rate": 9.741767126418898e-06,
      "loss": 0.6352,
      "step": 180
    },
    {
      "epoch": 0.19855111349610946,
      "grad_norm": 6.669287827969328,
      "learning_rate": 9.711167971445766e-06,
      "loss": 0.6245,
      "step": 185
    },
    {
      "epoch": 0.20391735980681513,
      "grad_norm": 2.418998473641885,
      "learning_rate": 9.67890959377418e-06,
      "loss": 0.6382,
      "step": 190
    },
    {
      "epoch": 0.20928360611752078,
      "grad_norm": 1.9261419429094924,
      "learning_rate": 9.645003354457872e-06,
      "loss": 0.6215,
      "step": 195
    },
    {
      "epoch": 0.21464985242822646,
      "grad_norm": 2.414448676465819,
      "learning_rate": 9.60946119490972e-06,
      "loss": 0.6146,
      "step": 200
    },
    {
      "epoch": 0.2200160987389321,
      "grad_norm": 1.885911480793101,
      "learning_rate": 9.57229563269612e-06,
      "loss": 0.6162,
      "step": 205
    },
    {
      "epoch": 0.22538234504963778,
      "grad_norm": 2.008464028829634,
      "learning_rate": 9.533519757128426e-06,
      "loss": 0.6156,
      "step": 210
    },
    {
      "epoch": 0.23074859136034345,
      "grad_norm": 2.16331315927476,
      "learning_rate": 9.49314722465304e-06,
      "loss": 0.6069,
      "step": 215
    },
    {
      "epoch": 0.2361148376710491,
      "grad_norm": 1.973961982068933,
      "learning_rate": 9.451192254041759e-06,
      "loss": 0.606,
      "step": 220
    },
    {
      "epoch": 0.24148108398175477,
      "grad_norm": 2.0087656849792186,
      "learning_rate": 9.407669621384073e-06,
      "loss": 0.6094,
      "step": 225
    },
    {
      "epoch": 0.24684733029246042,
      "grad_norm": 1.954551142386887,
      "learning_rate": 9.362594654883185e-06,
      "loss": 0.6041,
      "step": 230
    },
    {
      "epoch": 0.25221357660316607,
      "grad_norm": 1.929191512896689,
      "learning_rate": 9.31598322945759e-06,
      "loss": 0.6079,
      "step": 235
    },
    {
      "epoch": 0.25757982291387177,
      "grad_norm": 1.9441153007960759,
      "learning_rate": 9.267851761150092e-06,
      "loss": 0.6047,
      "step": 240
    },
    {
      "epoch": 0.2629460692245774,
      "grad_norm": 1.9607729270332375,
      "learning_rate": 9.218217201346251e-06,
      "loss": 0.5979,
      "step": 245
    },
    {
      "epoch": 0.26831231553528306,
      "grad_norm": 2.0964944576432205,
      "learning_rate": 9.167097030804289e-06,
      "loss": 0.5974,
      "step": 250
    },
    {
      "epoch": 0.2736785618459887,
      "grad_norm": 2.064418571453017,
      "learning_rate": 9.114509253498554e-06,
      "loss": 0.5911,
      "step": 255
    },
    {
      "epoch": 0.2790448081566944,
      "grad_norm": 1.9882015589578523,
      "learning_rate": 9.060472390278717e-06,
      "loss": 0.5852,
      "step": 260
    },
    {
      "epoch": 0.28441105446740006,
      "grad_norm": 1.8956409406412695,
      "learning_rate": 9.005005472346923e-06,
      "loss": 0.5893,
      "step": 265
    },
    {
      "epoch": 0.2897773007781057,
      "grad_norm": 2.420717269019028,
      "learning_rate": 8.948128034555212e-06,
      "loss": 0.5852,
      "step": 270
    },
    {
      "epoch": 0.29514354708881135,
      "grad_norm": 2.0430945196466825,
      "learning_rate": 8.889860108525544e-06,
      "loss": 0.5776,
      "step": 275
    },
    {
      "epoch": 0.30050979339951706,
      "grad_norm": 2.131587227034572,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.5783,
      "step": 280
    },
    {
      "epoch": 0.3058760397102227,
      "grad_norm": 2.012144077782948,
      "learning_rate": 8.76923535958783e-06,
      "loss": 0.5739,
      "step": 285
    },
    {
      "epoch": 0.31124228602092835,
      "grad_norm": 1.9798793555936918,
      "learning_rate": 8.706921019419237e-06,
      "loss": 0.5743,
      "step": 290
    },
    {
      "epoch": 0.316608532331634,
      "grad_norm": 2.1981871661379784,
      "learning_rate": 8.643301141529619e-06,
      "loss": 0.5537,
      "step": 295
    },
    {
      "epoch": 0.3219747786423397,
      "grad_norm": 1.9783993595823293,
      "learning_rate": 8.578398132155846e-06,
      "loss": 0.5736,
      "step": 300
    },
    {
      "epoch": 0.32734102495304535,
      "grad_norm": 2.266646671924457,
      "learning_rate": 8.512234849439887e-06,
      "loss": 0.5605,
      "step": 305
    },
    {
      "epoch": 0.332707271263751,
      "grad_norm": 2.0388062627147003,
      "learning_rate": 8.444834595378434e-06,
      "loss": 0.5541,
      "step": 310
    },
    {
      "epoch": 0.3380735175744567,
      "grad_norm": 1.8746273235590827,
      "learning_rate": 8.376221107616187e-06,
      "loss": 0.5493,
      "step": 315
    },
    {
      "epoch": 0.34343976388516234,
      "grad_norm": 1.9641687714089828,
      "learning_rate": 8.306418551085707e-06,
      "loss": 0.5473,
      "step": 320
    },
    {
      "epoch": 0.348806010195868,
      "grad_norm": 2.0231873134828047,
      "learning_rate": 8.23545150949679e-06,
      "loss": 0.5382,
      "step": 325
    },
    {
      "epoch": 0.35417225650657364,
      "grad_norm": 1.9081132594420114,
      "learning_rate": 8.163344976678342e-06,
      "loss": 0.5415,
      "step": 330
    },
    {
      "epoch": 0.35953850281727934,
      "grad_norm": 1.8808578978116353,
      "learning_rate": 8.090124347775837e-06,
      "loss": 0.5413,
      "step": 335
    },
    {
      "epoch": 0.364904749127985,
      "grad_norm": 2.069476839747095,
      "learning_rate": 8.0158154103074e-06,
      "loss": 0.5378,
      "step": 340
    },
    {
      "epoch": 0.37027099543869063,
      "grad_norm": 2.1043362965325105,
      "learning_rate": 7.940444335081733e-06,
      "loss": 0.5553,
      "step": 345
    },
    {
      "epoch": 0.3756372417493963,
      "grad_norm": 2.0354788880153682,
      "learning_rate": 7.864037666981037e-06,
      "loss": 0.541,
      "step": 350
    },
    {
      "epoch": 0.381003488060102,
      "grad_norm": 2.064254592161078,
      "learning_rate": 7.786622315612182e-06,
      "loss": 0.5329,
      "step": 355
    },
    {
      "epoch": 0.3863697343708076,
      "grad_norm": 1.9234108182391954,
      "learning_rate": 7.708225545829446e-06,
      "loss": 0.5226,
      "step": 360
    },
    {
      "epoch": 0.3917359806815133,
      "grad_norm": 1.9515453224494816,
      "learning_rate": 7.6288749681321115e-06,
      "loss": 0.5277,
      "step": 365
    },
    {
      "epoch": 0.3971022269922189,
      "grad_norm": 1.9085592553037638,
      "learning_rate": 7.548598528940354e-06,
      "loss": 0.5285,
      "step": 370
    },
    {
      "epoch": 0.4024684733029246,
      "grad_norm": 1.870687980940359,
      "learning_rate": 7.4674245007528135e-06,
      "loss": 0.5247,
      "step": 375
    },
    {
      "epoch": 0.40783471961363027,
      "grad_norm": 2.077267366579556,
      "learning_rate": 7.385381472189321e-06,
      "loss": 0.5116,
      "step": 380
    },
    {
      "epoch": 0.4132009659243359,
      "grad_norm": 1.9454145271664978,
      "learning_rate": 7.302498337922293e-06,
      "loss": 0.5213,
      "step": 385
    },
    {
      "epoch": 0.41856721223504156,
      "grad_norm": 1.9119626674890595,
      "learning_rate": 7.218804288500343e-06,
      "loss": 0.5264,
      "step": 390
    },
    {
      "epoch": 0.42393345854574727,
      "grad_norm": 2.022414838693354,
      "learning_rate": 7.134328800067684e-06,
      "loss": 0.5194,
      "step": 395
    },
    {
      "epoch": 0.4292997048564529,
      "grad_norm": 2.092163320659975,
      "learning_rate": 7.049101623982938e-06,
      "loss": 0.5131,
      "step": 400
    },
    {
      "epoch": 0.43466595116715856,
      "grad_norm": 2.0021479768579558,
      "learning_rate": 6.963152776341044e-06,
      "loss": 0.5041,
      "step": 405
    },
    {
      "epoch": 0.4400321974778642,
      "grad_norm": 2.0327320697895814,
      "learning_rate": 6.876512527401897e-06,
      "loss": 0.5052,
      "step": 410
    },
    {
      "epoch": 0.4453984437885699,
      "grad_norm": 2.2412216150911832,
      "learning_rate": 6.789211390929497e-06,
      "loss": 0.5054,
      "step": 415
    },
    {
      "epoch": 0.45076469009927556,
      "grad_norm": 2.115153656464059,
      "learning_rate": 6.701280113445324e-06,
      "loss": 0.5061,
      "step": 420
    },
    {
      "epoch": 0.4561309364099812,
      "grad_norm": 1.9249370789560831,
      "learning_rate": 6.6127496633997475e-06,
      "loss": 0.4842,
      "step": 425
    },
    {
      "epoch": 0.4614971827206869,
      "grad_norm": 1.953527868661831,
      "learning_rate": 6.523651220265269e-06,
      "loss": 0.4949,
      "step": 430
    },
    {
      "epoch": 0.46686342903139255,
      "grad_norm": 1.8982198151481615,
      "learning_rate": 6.434016163555452e-06,
      "loss": 0.491,
      "step": 435
    },
    {
      "epoch": 0.4722296753420982,
      "grad_norm": 1.841890965273987,
      "learning_rate": 6.343876061773385e-06,
      "loss": 0.487,
      "step": 440
    },
    {
      "epoch": 0.47759592165280385,
      "grad_norm": 1.9387691599261847,
      "learning_rate": 6.2532626612936035e-06,
      "loss": 0.4736,
      "step": 445
    },
    {
      "epoch": 0.48296216796350955,
      "grad_norm": 1.861661855956454,
      "learning_rate": 6.162207875181354e-06,
      "loss": 0.4787,
      "step": 450
    },
    {
      "epoch": 0.4883284142742152,
      "grad_norm": 1.866771313626566,
      "learning_rate": 6.070743771953157e-06,
      "loss": 0.4707,
      "step": 455
    },
    {
      "epoch": 0.49369466058492084,
      "grad_norm": 1.8847305365601024,
      "learning_rate": 5.978902564282616e-06,
      "loss": 0.4764,
      "step": 460
    },
    {
      "epoch": 0.4990609068956265,
      "grad_norm": 2.0081377194420384,
      "learning_rate": 5.886716597655472e-06,
      "loss": 0.4728,
      "step": 465
    },
    {
      "epoch": 0.5044271532063321,
      "grad_norm": 2.040321737831785,
      "learning_rate": 5.7942183389778536e-06,
      "loss": 0.4702,
      "step": 470
    },
    {
      "epoch": 0.5097933995170378,
      "grad_norm": 2.121711837077905,
      "learning_rate": 5.701440365141799e-06,
      "loss": 0.4629,
      "step": 475
    },
    {
      "epoch": 0.5151596458277435,
      "grad_norm": 1.8953898552423818,
      "learning_rate": 5.608415351552014e-06,
      "loss": 0.4618,
      "step": 480
    },
    {
      "epoch": 0.5205258921384491,
      "grad_norm": 1.9319156687040069,
      "learning_rate": 5.515176060617945e-06,
      "loss": 0.4645,
      "step": 485
    },
    {
      "epoch": 0.5258921384491548,
      "grad_norm": 2.0580844567420753,
      "learning_rate": 5.421755330215223e-06,
      "loss": 0.4513,
      "step": 490
    },
    {
      "epoch": 0.5312583847598604,
      "grad_norm": 1.9986754618051978,
      "learning_rate": 5.328186062120509e-06,
      "loss": 0.4667,
      "step": 495
    },
    {
      "epoch": 0.5366246310705661,
      "grad_norm": 1.8960453690474102,
      "learning_rate": 5.23450121042383e-06,
      "loss": 0.4673,
      "step": 500
    },
    {
      "epoch": 0.5419908773812718,
      "grad_norm": 1.905017636620808,
      "learning_rate": 5.140733769922525e-06,
      "loss": 0.4537,
      "step": 505
    },
    {
      "epoch": 0.5473571236919774,
      "grad_norm": 1.955396324000518,
      "learning_rate": 5.0469167645008245e-06,
      "loss": 0.4478,
      "step": 510
    },
    {
      "epoch": 0.5527233700026831,
      "grad_norm": 1.9184357161762489,
      "learning_rate": 4.953083235499177e-06,
      "loss": 0.4455,
      "step": 515
    },
    {
      "epoch": 0.5580896163133888,
      "grad_norm": 1.8626012702880614,
      "learning_rate": 4.859266230077474e-06,
      "loss": 0.4435,
      "step": 520
    },
    {
      "epoch": 0.5634558626240944,
      "grad_norm": 1.8866714654623984,
      "learning_rate": 4.7654987895761705e-06,
      "loss": 0.4492,
      "step": 525
    },
    {
      "epoch": 0.5688221089348001,
      "grad_norm": 1.9997800662285936,
      "learning_rate": 4.671813937879494e-06,
      "loss": 0.4552,
      "step": 530
    },
    {
      "epoch": 0.5741883552455058,
      "grad_norm": 2.008542669530135,
      "learning_rate": 4.5782446697847775e-06,
      "loss": 0.4329,
      "step": 535
    },
    {
      "epoch": 0.5795546015562114,
      "grad_norm": 1.9257375981696094,
      "learning_rate": 4.484823939382056e-06,
      "loss": 0.4433,
      "step": 540
    },
    {
      "epoch": 0.5849208478669171,
      "grad_norm": 2.018856467121276,
      "learning_rate": 4.391584648447989e-06,
      "loss": 0.4352,
      "step": 545
    },
    {
      "epoch": 0.5902870941776227,
      "grad_norm": 1.8446623655945045,
      "learning_rate": 4.298559634858202e-06,
      "loss": 0.4338,
      "step": 550
    },
    {
      "epoch": 0.5956533404883284,
      "grad_norm": 1.9846432448897824,
      "learning_rate": 4.205781661022146e-06,
      "loss": 0.4228,
      "step": 555
    },
    {
      "epoch": 0.6010195867990341,
      "grad_norm": 1.9065968436703373,
      "learning_rate": 4.1132834023445304e-06,
      "loss": 0.4259,
      "step": 560
    },
    {
      "epoch": 0.6063858331097397,
      "grad_norm": 1.972887566791961,
      "learning_rate": 4.021097435717386e-06,
      "loss": 0.4179,
      "step": 565
    },
    {
      "epoch": 0.6117520794204454,
      "grad_norm": 2.085630898921528,
      "learning_rate": 3.929256228046845e-06,
      "loss": 0.4205,
      "step": 570
    },
    {
      "epoch": 0.6171183257311511,
      "grad_norm": 1.9369874318383387,
      "learning_rate": 3.837792124818647e-06,
      "loss": 0.4247,
      "step": 575
    },
    {
      "epoch": 0.6224845720418567,
      "grad_norm": 1.8597647904112595,
      "learning_rate": 3.7467373387063973e-06,
      "loss": 0.4205,
      "step": 580
    },
    {
      "epoch": 0.6278508183525624,
      "grad_norm": 1.7821892594233963,
      "learning_rate": 3.656123938226618e-06,
      "loss": 0.4046,
      "step": 585
    },
    {
      "epoch": 0.633217064663268,
      "grad_norm": 1.9833859795715034,
      "learning_rate": 3.5659838364445505e-06,
      "loss": 0.4111,
      "step": 590
    },
    {
      "epoch": 0.6385833109739737,
      "grad_norm": 1.944780902162987,
      "learning_rate": 3.476348779734732e-06,
      "loss": 0.415,
      "step": 595
    },
    {
      "epoch": 0.6439495572846794,
      "grad_norm": 1.9550245947033087,
      "learning_rate": 3.387250336600254e-06,
      "loss": 0.4191,
      "step": 600
    },
    {
      "epoch": 0.649315803595385,
      "grad_norm": 1.8117200718403572,
      "learning_rate": 3.298719886554677e-06,
      "loss": 0.3993,
      "step": 605
    },
    {
      "epoch": 0.6546820499060907,
      "grad_norm": 1.940882592630498,
      "learning_rate": 3.2107886090705035e-06,
      "loss": 0.4081,
      "step": 610
    },
    {
      "epoch": 0.6600482962167964,
      "grad_norm": 1.8525749922673174,
      "learning_rate": 3.1234874725981045e-06,
      "loss": 0.4076,
      "step": 615
    },
    {
      "epoch": 0.665414542527502,
      "grad_norm": 1.9210849233799236,
      "learning_rate": 3.036847223658958e-06,
      "loss": 0.4108,
      "step": 620
    },
    {
      "epoch": 0.6707807888382077,
      "grad_norm": 1.9000639666031305,
      "learning_rate": 2.950898376017064e-06,
      "loss": 0.4034,
      "step": 625
    },
    {
      "epoch": 0.6761470351489134,
      "grad_norm": 2.003614087267324,
      "learning_rate": 2.865671199932318e-06,
      "loss": 0.4132,
      "step": 630
    },
    {
      "epoch": 0.681513281459619,
      "grad_norm": 1.9763936407420173,
      "learning_rate": 2.781195711499658e-06,
      "loss": 0.4016,
      "step": 635
    },
    {
      "epoch": 0.6868795277703247,
      "grad_norm": 1.9016795114295642,
      "learning_rate": 2.697501662077707e-06,
      "loss": 0.401,
      "step": 640
    },
    {
      "epoch": 0.6922457740810303,
      "grad_norm": 1.9624570132442953,
      "learning_rate": 2.6146185278106807e-06,
      "loss": 0.3933,
      "step": 645
    },
    {
      "epoch": 0.697612020391736,
      "grad_norm": 1.8275718245227606,
      "learning_rate": 2.5325754992471886e-06,
      "loss": 0.393,
      "step": 650
    },
    {
      "epoch": 0.7029782667024417,
      "grad_norm": 1.8709439923940803,
      "learning_rate": 2.4514014710596467e-06,
      "loss": 0.388,
      "step": 655
    },
    {
      "epoch": 0.7083445130131473,
      "grad_norm": 1.911631586090792,
      "learning_rate": 2.3711250318678906e-06,
      "loss": 0.3941,
      "step": 660
    },
    {
      "epoch": 0.713710759323853,
      "grad_norm": 1.7745399065562626,
      "learning_rate": 2.2917744541705544e-06,
      "loss": 0.3865,
      "step": 665
    },
    {
      "epoch": 0.7190770056345587,
      "grad_norm": 1.998234024876106,
      "learning_rate": 2.2133776843878185e-06,
      "loss": 0.3866,
      "step": 670
    },
    {
      "epoch": 0.7244432519452643,
      "grad_norm": 1.808681557692019,
      "learning_rate": 2.1359623330189655e-06,
      "loss": 0.3856,
      "step": 675
    },
    {
      "epoch": 0.72980949825597,
      "grad_norm": 1.8088686581817492,
      "learning_rate": 2.059555664918268e-06,
      "loss": 0.3901,
      "step": 680
    },
    {
      "epoch": 0.7351757445666756,
      "grad_norm": 1.8039745722281364,
      "learning_rate": 1.9841845896926022e-06,
      "loss": 0.3829,
      "step": 685
    },
    {
      "epoch": 0.7405419908773813,
      "grad_norm": 1.8674727402680937,
      "learning_rate": 1.9098756522241634e-06,
      "loss": 0.3768,
      "step": 690
    },
    {
      "epoch": 0.745908237188087,
      "grad_norm": 1.9068171536750957,
      "learning_rate": 1.8366550233216584e-06,
      "loss": 0.3812,
      "step": 695
    },
    {
      "epoch": 0.7512744834987926,
      "grad_norm": 1.9765087911769241,
      "learning_rate": 1.7645484905032129e-06,
      "loss": 0.3764,
      "step": 700
    },
    {
      "epoch": 0.7566407298094983,
      "grad_norm": 1.775516730227496,
      "learning_rate": 1.6935814489142937e-06,
      "loss": 0.3797,
      "step": 705
    },
    {
      "epoch": 0.762006976120204,
      "grad_norm": 1.7646714298996984,
      "learning_rate": 1.6237788923838149e-06,
      "loss": 0.371,
      "step": 710
    },
    {
      "epoch": 0.7673732224309096,
      "grad_norm": 1.8315846638175772,
      "learning_rate": 1.555165404621567e-06,
      "loss": 0.3835,
      "step": 715
    },
    {
      "epoch": 0.7727394687416153,
      "grad_norm": 1.8505733173378243,
      "learning_rate": 1.487765150560116e-06,
      "loss": 0.3687,
      "step": 720
    },
    {
      "epoch": 0.778105715052321,
      "grad_norm": 1.7848924875740815,
      "learning_rate": 1.4216018678441558e-06,
      "loss": 0.3724,
      "step": 725
    },
    {
      "epoch": 0.7834719613630265,
      "grad_norm": 1.7527547920489353,
      "learning_rate": 1.3566988584703817e-06,
      "loss": 0.3676,
      "step": 730
    },
    {
      "epoch": 0.7888382076737323,
      "grad_norm": 2.017475663507191,
      "learning_rate": 1.293078980580766e-06,
      "loss": 0.3624,
      "step": 735
    },
    {
      "epoch": 0.7942044539844378,
      "grad_norm": 1.932581243094435,
      "learning_rate": 1.2307646404121692e-06,
      "loss": 0.3701,
      "step": 740
    },
    {
      "epoch": 0.7995707002951435,
      "grad_norm": 1.8424247641839024,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 0.3711,
      "step": 745
    },
    {
      "epoch": 0.8049369466058492,
      "grad_norm": 1.8159029145907732,
      "learning_rate": 1.1101398914744565e-06,
      "loss": 0.3633,
      "step": 750
    },
    {
      "epoch": 0.8103031929165548,
      "grad_norm": 2.055480043240695,
      "learning_rate": 1.0518719654447896e-06,
      "loss": 0.3623,
      "step": 755
    },
    {
      "epoch": 0.8156694392272605,
      "grad_norm": 1.7975799838896245,
      "learning_rate": 9.949945276530782e-07,
      "loss": 0.3606,
      "step": 760
    },
    {
      "epoch": 0.8210356855379662,
      "grad_norm": 1.7631852825296395,
      "learning_rate": 9.395276097212841e-07,
      "loss": 0.3592,
      "step": 765
    },
    {
      "epoch": 0.8264019318486718,
      "grad_norm": 1.7910286892268594,
      "learning_rate": 8.854907465014479e-07,
      "loss": 0.3551,
      "step": 770
    },
    {
      "epoch": 0.8317681781593775,
      "grad_norm": 1.8158143036353944,
      "learning_rate": 8.329029691957124e-07,
      "loss": 0.364,
      "step": 775
    },
    {
      "epoch": 0.8371344244700831,
      "grad_norm": 1.9191566287165054,
      "learning_rate": 7.817827986537508e-07,
      "loss": 0.3553,
      "step": 780
    },
    {
      "epoch": 0.8425006707807888,
      "grad_norm": 1.9001999154855205,
      "learning_rate": 7.321482388499096e-07,
      "loss": 0.3599,
      "step": 785
    },
    {
      "epoch": 0.8478669170914945,
      "grad_norm": 1.8486152336637562,
      "learning_rate": 6.840167705424106e-07,
      "loss": 0.3601,
      "step": 790
    },
    {
      "epoch": 0.8532331634022001,
      "grad_norm": 1.7749346973416724,
      "learning_rate": 6.374053451168166e-07,
      "loss": 0.3549,
      "step": 795
    },
    {
      "epoch": 0.8585994097129058,
      "grad_norm": 1.8218491540968815,
      "learning_rate": 5.92330378615929e-07,
      "loss": 0.3645,
      "step": 800
    },
    {
      "epoch": 0.8639656560236115,
      "grad_norm": 1.783717297938072,
      "learning_rate": 5.488077459582425e-07,
      "loss": 0.3583,
      "step": 805
    },
    {
      "epoch": 0.8693319023343171,
      "grad_norm": 1.7473164870091953,
      "learning_rate": 5.068527753469604e-07,
      "loss": 0.353,
      "step": 810
    },
    {
      "epoch": 0.8746981486450228,
      "grad_norm": 1.6866190577833358,
      "learning_rate": 4.664802428715753e-07,
      "loss": 0.3569,
      "step": 815
    },
    {
      "epoch": 0.8800643949557284,
      "grad_norm": 1.7605022217720974,
      "learning_rate": 4.2770436730388166e-07,
      "loss": 0.3427,
      "step": 820
    },
    {
      "epoch": 0.8854306412664341,
      "grad_norm": 1.748026700381262,
      "learning_rate": 3.9053880509028086e-07,
      "loss": 0.3556,
      "step": 825
    },
    {
      "epoch": 0.8907968875771398,
      "grad_norm": 1.82694857458379,
      "learning_rate": 3.549966455421305e-07,
      "loss": 0.3569,
      "step": 830
    },
    {
      "epoch": 0.8961631338878454,
      "grad_norm": 1.7449903357160679,
      "learning_rate": 3.2109040622582186e-07,
      "loss": 0.3567,
      "step": 835
    },
    {
      "epoch": 0.9015293801985511,
      "grad_norm": 1.765178738808068,
      "learning_rate": 2.8883202855423676e-07,
      "loss": 0.3464,
      "step": 840
    },
    {
      "epoch": 0.9068956265092568,
      "grad_norm": 1.8646790389554087,
      "learning_rate": 2.582328735811029e-07,
      "loss": 0.3485,
      "step": 845
    },
    {
      "epoch": 0.9122618728199624,
      "grad_norm": 1.7327234524732287,
      "learning_rate": 2.2930371799975593e-07,
      "loss": 0.3548,
      "step": 850
    },
    {
      "epoch": 0.9176281191306681,
      "grad_norm": 1.7873784801521655,
      "learning_rate": 2.0205475034770606e-07,
      "loss": 0.3529,
      "step": 855
    },
    {
      "epoch": 0.9229943654413738,
      "grad_norm": 1.8492213076499473,
      "learning_rate": 1.7649556741833995e-07,
      "loss": 0.3473,
      "step": 860
    },
    {
      "epoch": 0.9283606117520794,
      "grad_norm": 1.8273172776853641,
      "learning_rate": 1.5263517088103862e-07,
      "loss": 0.351,
      "step": 865
    },
    {
      "epoch": 0.9337268580627851,
      "grad_norm": 1.9136908811797346,
      "learning_rate": 1.304819641108801e-07,
      "loss": 0.3471,
      "step": 870
    },
    {
      "epoch": 0.9390931043734907,
      "grad_norm": 1.7762316103832752,
      "learning_rate": 1.1004374922906846e-07,
      "loss": 0.3412,
      "step": 875
    },
    {
      "epoch": 0.9444593506841964,
      "grad_norm": 1.9275493541172453,
      "learning_rate": 9.132772435510362e-08,
      "loss": 0.3566,
      "step": 880
    },
    {
      "epoch": 0.9498255969949021,
      "grad_norm": 1.714257495081281,
      "learning_rate": 7.434048107168523e-08,
      "loss": 0.3426,
      "step": 885
    },
    {
      "epoch": 0.9551918433056077,
      "grad_norm": 1.7853244835501505,
      "learning_rate": 5.908800210322696e-08,
      "loss": 0.3455,
      "step": 890
    },
    {
      "epoch": 0.9605580896163134,
      "grad_norm": 1.7339077062506096,
      "learning_rate": 4.55756592088058e-08,
      "loss": 0.3456,
      "step": 895
    },
    {
      "epoch": 0.9659243359270191,
      "grad_norm": 1.8108202681226637,
      "learning_rate": 3.3808211290284886e-08,
      "loss": 0.3475,
      "step": 900
    },
    {
      "epoch": 0.9712905822377247,
      "grad_norm": 1.7215289002872909,
      "learning_rate": 2.378980271628195e-08,
      "loss": 0.3417,
      "step": 905
    },
    {
      "epoch": 0.9766568285484304,
      "grad_norm": 1.7249368209329086,
      "learning_rate": 1.552396186256411e-08,
      "loss": 0.3469,
      "step": 910
    },
    {
      "epoch": 0.982023074859136,
      "grad_norm": 1.7553769156007077,
      "learning_rate": 9.013599869394096e-09,
      "loss": 0.3512,
      "step": 915
    },
    {
      "epoch": 0.9873893211698417,
      "grad_norm": 1.734616112736488,
      "learning_rate": 4.261009616257638e-09,
      "loss": 0.3459,
      "step": 920
    },
    {
      "epoch": 0.9927555674805474,
      "grad_norm": 1.7507232934356496,
      "learning_rate": 1.2678649143349485e-09,
      "loss": 0.3454,
      "step": 925
    },
    {
      "epoch": 0.998121813791253,
      "grad_norm": 1.8191481247099934,
      "learning_rate": 3.5219917003948003e-11,
      "loss": 0.3481,
      "step": 930
    },
    {
      "epoch": 0.9991950630533941,
      "eval_loss": 0.3430202901363373,
      "eval_runtime": 96.876,
      "eval_samples_per_second": 3.117,
      "eval_steps_per_second": 0.785,
      "step": 931
    },
    {
      "epoch": 0.9991950630533941,
      "step": 931,
      "total_flos": 194880114524160.0,
      "train_loss": 0.5082111883624412,
      "train_runtime": 20913.2373,
      "train_samples_per_second": 1.425,
      "train_steps_per_second": 0.045
    }
  ],
  "logging_steps": 5,
  "max_steps": 931,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 194880114524160.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}