| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.0, |
| "eval_steps": 500, |
| "global_step": 3930, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.002544529262086514, |
| "grad_norm": 2.135885238647461, |
| "learning_rate": 9.160305343511451e-07, |
| "loss": 0.1137, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.005089058524173028, |
| "grad_norm": 2.356735944747925, |
| "learning_rate": 1.933842239185751e-06, |
| "loss": 0.1656, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.007633587786259542, |
| "grad_norm": 3.483503818511963, |
| "learning_rate": 2.951653944020356e-06, |
| "loss": 0.1356, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.010178117048346057, |
| "grad_norm": 3.4808290004730225, |
| "learning_rate": 3.969465648854962e-06, |
| "loss": 0.1575, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.01272264631043257, |
| "grad_norm": 2.5104947090148926, |
| "learning_rate": 4.987277353689568e-06, |
| "loss": 0.0749, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.015267175572519083, |
| "grad_norm": 3.5466084480285645, |
| "learning_rate": 6.005089058524174e-06, |
| "loss": 0.0631, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.017811704834605598, |
| "grad_norm": 0.6639775633811951, |
| "learning_rate": 7.022900763358779e-06, |
| "loss": 0.0374, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.020356234096692113, |
| "grad_norm": 1.4179292917251587, |
| "learning_rate": 8.040712468193384e-06, |
| "loss": 0.0413, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.022900763358778626, |
| "grad_norm": 2.9656121730804443, |
| "learning_rate": 9.058524173027991e-06, |
| "loss": 0.0362, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.02544529262086514, |
| "grad_norm": 3.4811854362487793, |
| "learning_rate": 1.0076335877862595e-05, |
| "loss": 0.0411, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.027989821882951654, |
| "grad_norm": 2.1551337242126465, |
| "learning_rate": 1.1094147582697202e-05, |
| "loss": 0.0255, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.030534351145038167, |
| "grad_norm": 1.5262765884399414, |
| "learning_rate": 1.2111959287531807e-05, |
| "loss": 0.0225, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.03307888040712468, |
| "grad_norm": 1.3079928159713745, |
| "learning_rate": 1.3129770992366414e-05, |
| "loss": 0.0323, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.035623409669211195, |
| "grad_norm": 0.22996912896633148, |
| "learning_rate": 1.4147582697201019e-05, |
| "loss": 0.0188, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.03816793893129771, |
| "grad_norm": 1.0834699869155884, |
| "learning_rate": 1.5165394402035624e-05, |
| "loss": 0.0265, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.04071246819338423, |
| "grad_norm": 0.9807230830192566, |
| "learning_rate": 1.618320610687023e-05, |
| "loss": 0.0189, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.043256997455470736, |
| "grad_norm": 1.8505427837371826, |
| "learning_rate": 1.7201017811704836e-05, |
| "loss": 0.0142, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.04580152671755725, |
| "grad_norm": 0.346323698759079, |
| "learning_rate": 1.8218829516539443e-05, |
| "loss": 0.0237, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.04834605597964377, |
| "grad_norm": 1.4252818822860718, |
| "learning_rate": 1.923664122137405e-05, |
| "loss": 0.0169, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.05089058524173028, |
| "grad_norm": 0.624088704586029, |
| "learning_rate": 2.0254452926208653e-05, |
| "loss": 0.0153, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.05343511450381679, |
| "grad_norm": 0.7245016098022461, |
| "learning_rate": 2.127226463104326e-05, |
| "loss": 0.0211, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.05597964376590331, |
| "grad_norm": 1.2375560998916626, |
| "learning_rate": 2.2290076335877867e-05, |
| "loss": 0.0146, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.058524173027989825, |
| "grad_norm": 0.15632864832878113, |
| "learning_rate": 2.330788804071247e-05, |
| "loss": 0.0179, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.061068702290076333, |
| "grad_norm": 0.023025542497634888, |
| "learning_rate": 2.4325699745547078e-05, |
| "loss": 0.02, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.06361323155216285, |
| "grad_norm": 0.5999135375022888, |
| "learning_rate": 2.5343511450381678e-05, |
| "loss": 0.0057, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.06615776081424936, |
| "grad_norm": 0.08057913929224014, |
| "learning_rate": 2.6361323155216285e-05, |
| "loss": 0.0203, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.06870229007633588, |
| "grad_norm": 0.3958178460597992, |
| "learning_rate": 2.737913486005089e-05, |
| "loss": 0.0365, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.07124681933842239, |
| "grad_norm": 0.24179188907146454, |
| "learning_rate": 2.8396946564885498e-05, |
| "loss": 0.0198, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.0737913486005089, |
| "grad_norm": 1.3984962701797485, |
| "learning_rate": 2.9414758269720102e-05, |
| "loss": 0.0119, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.07633587786259542, |
| "grad_norm": 0.08135154098272324, |
| "learning_rate": 3.043256997455471e-05, |
| "loss": 0.0101, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.07888040712468193, |
| "grad_norm": 2.0051238536834717, |
| "learning_rate": 3.145038167938931e-05, |
| "loss": 0.0245, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.08142493638676845, |
| "grad_norm": 0.49833422899246216, |
| "learning_rate": 3.246819338422392e-05, |
| "loss": 0.0181, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.08396946564885496, |
| "grad_norm": 0.09235569834709167, |
| "learning_rate": 3.3486005089058526e-05, |
| "loss": 0.0116, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.08651399491094147, |
| "grad_norm": 0.06826785206794739, |
| "learning_rate": 3.450381679389313e-05, |
| "loss": 0.0161, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.089058524173028, |
| "grad_norm": 1.1774462461471558, |
| "learning_rate": 3.552162849872774e-05, |
| "loss": 0.0144, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.0916030534351145, |
| "grad_norm": 0.6383160948753357, |
| "learning_rate": 3.653944020356235e-05, |
| "loss": 0.0197, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.09414758269720101, |
| "grad_norm": 0.7711915969848633, |
| "learning_rate": 3.755725190839695e-05, |
| "loss": 0.0086, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.09669211195928754, |
| "grad_norm": 0.7851189970970154, |
| "learning_rate": 3.8575063613231554e-05, |
| "loss": 0.0235, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.09923664122137404, |
| "grad_norm": 0.08650732040405273, |
| "learning_rate": 3.959287531806616e-05, |
| "loss": 0.0151, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.10178117048346055, |
| "grad_norm": 1.2129896879196167, |
| "learning_rate": 3.993214588634436e-05, |
| "loss": 0.0206, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.10432569974554708, |
| "grad_norm": 1.19416344165802, |
| "learning_rate": 3.9819055696918295e-05, |
| "loss": 0.0168, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.10687022900763359, |
| "grad_norm": 0.696465253829956, |
| "learning_rate": 3.970596550749223e-05, |
| "loss": 0.0215, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.10941475826972011, |
| "grad_norm": 0.6113833785057068, |
| "learning_rate": 3.959287531806616e-05, |
| "loss": 0.0152, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.11195928753180662, |
| "grad_norm": 0.04557690769433975, |
| "learning_rate": 3.947978512864009e-05, |
| "loss": 0.0036, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.11450381679389313, |
| "grad_norm": 0.6280457377433777, |
| "learning_rate": 3.9366694939214026e-05, |
| "loss": 0.0091, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.11704834605597965, |
| "grad_norm": 0.6400570273399353, |
| "learning_rate": 3.925360474978796e-05, |
| "loss": 0.0066, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.11959287531806616, |
| "grad_norm": 0.4319634735584259, |
| "learning_rate": 3.914051456036189e-05, |
| "loss": 0.0049, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.12213740458015267, |
| "grad_norm": 0.01860439032316208, |
| "learning_rate": 3.902742437093582e-05, |
| "loss": 0.0098, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.12468193384223919, |
| "grad_norm": 0.03158613294363022, |
| "learning_rate": 3.8914334181509756e-05, |
| "loss": 0.0133, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.1272264631043257, |
| "grad_norm": 0.18111048638820648, |
| "learning_rate": 3.880124399208369e-05, |
| "loss": 0.0172, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.1272264631043257, |
| "eval_loss": 0.04248664900660515, |
| "eval_runtime": 133.7251, |
| "eval_samples_per_second": 59.69, |
| "eval_steps_per_second": 0.471, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.1297709923664122, |
| "grad_norm": 1.3289774656295776, |
| "learning_rate": 3.868815380265762e-05, |
| "loss": 0.0191, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.13231552162849872, |
| "grad_norm": 0.19090017676353455, |
| "learning_rate": 3.8575063613231554e-05, |
| "loss": 0.0149, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.13486005089058525, |
| "grad_norm": 0.12902715802192688, |
| "learning_rate": 3.8461973423805486e-05, |
| "loss": 0.0061, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.13740458015267176, |
| "grad_norm": 0.7780885696411133, |
| "learning_rate": 3.834888323437942e-05, |
| "loss": 0.0109, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.13994910941475827, |
| "grad_norm": 1.1186180114746094, |
| "learning_rate": 3.823579304495336e-05, |
| "loss": 0.0092, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.14249363867684478, |
| "grad_norm": 0.9364501237869263, |
| "learning_rate": 3.8122702855527284e-05, |
| "loss": 0.0146, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.1450381679389313, |
| "grad_norm": 0.32928383350372314, |
| "learning_rate": 3.8009612666101216e-05, |
| "loss": 0.0108, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.1475826972010178, |
| "grad_norm": 0.10685549676418304, |
| "learning_rate": 3.789652247667515e-05, |
| "loss": 0.0039, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.15012722646310434, |
| "grad_norm": 0.07820765674114227, |
| "learning_rate": 3.778343228724908e-05, |
| "loss": 0.0128, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.15267175572519084, |
| "grad_norm": 0.6988438367843628, |
| "learning_rate": 3.767034209782302e-05, |
| "loss": 0.0031, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.15521628498727735, |
| "grad_norm": 0.19633935391902924, |
| "learning_rate": 3.755725190839695e-05, |
| "loss": 0.0131, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.15776081424936386, |
| "grad_norm": 0.019271759316325188, |
| "learning_rate": 3.744416171897088e-05, |
| "loss": 0.0172, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.16030534351145037, |
| "grad_norm": 0.07235724478960037, |
| "learning_rate": 3.733107152954481e-05, |
| "loss": 0.0141, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.1628498727735369, |
| "grad_norm": 0.7043355703353882, |
| "learning_rate": 3.721798134011875e-05, |
| "loss": 0.0093, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.16539440203562342, |
| "grad_norm": 0.18184177577495575, |
| "learning_rate": 3.7104891150692684e-05, |
| "loss": 0.0034, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.16793893129770993, |
| "grad_norm": 0.2706955075263977, |
| "learning_rate": 3.699180096126661e-05, |
| "loss": 0.0066, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.17048346055979643, |
| "grad_norm": 0.2176658660173416, |
| "learning_rate": 3.687871077184054e-05, |
| "loss": 0.0095, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.17302798982188294, |
| "grad_norm": 0.025386134162545204, |
| "learning_rate": 3.676562058241448e-05, |
| "loss": 0.0137, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.17557251908396945, |
| "grad_norm": 0.4194026589393616, |
| "learning_rate": 3.6652530392988414e-05, |
| "loss": 0.0222, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.178117048346056, |
| "grad_norm": 0.8922601938247681, |
| "learning_rate": 3.653944020356235e-05, |
| "loss": 0.0118, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.1806615776081425, |
| "grad_norm": 0.5333408117294312, |
| "learning_rate": 3.642635001413627e-05, |
| "loss": 0.007, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.183206106870229, |
| "grad_norm": 0.033292148262262344, |
| "learning_rate": 3.6313259824710205e-05, |
| "loss": 0.0105, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.18575063613231552, |
| "grad_norm": 0.6917471885681152, |
| "learning_rate": 3.6200169635284144e-05, |
| "loss": 0.0082, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.18829516539440203, |
| "grad_norm": 0.43314284086227417, |
| "learning_rate": 3.608707944585808e-05, |
| "loss": 0.0075, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.19083969465648856, |
| "grad_norm": 0.038449253886938095, |
| "learning_rate": 3.597398925643201e-05, |
| "loss": 0.0103, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.19338422391857507, |
| "grad_norm": 0.47251349687576294, |
| "learning_rate": 3.586089906700594e-05, |
| "loss": 0.0088, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.19592875318066158, |
| "grad_norm": 0.23368120193481445, |
| "learning_rate": 3.5747808877579875e-05, |
| "loss": 0.004, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.1984732824427481, |
| "grad_norm": 0.2743458151817322, |
| "learning_rate": 3.563471868815381e-05, |
| "loss": 0.0063, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.2010178117048346, |
| "grad_norm": 1.0896027088165283, |
| "learning_rate": 3.552162849872774e-05, |
| "loss": 0.0091, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.2035623409669211, |
| "grad_norm": 1.1141908168792725, |
| "learning_rate": 3.540853830930167e-05, |
| "loss": 0.0167, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.20610687022900764, |
| "grad_norm": 0.32985711097717285, |
| "learning_rate": 3.5295448119875605e-05, |
| "loss": 0.0143, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.20865139949109415, |
| "grad_norm": 0.13722413778305054, |
| "learning_rate": 3.518235793044954e-05, |
| "loss": 0.0028, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.21119592875318066, |
| "grad_norm": 0.8024986386299133, |
| "learning_rate": 3.506926774102347e-05, |
| "loss": 0.0103, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.21374045801526717, |
| "grad_norm": 0.7631940245628357, |
| "learning_rate": 3.49561775515974e-05, |
| "loss": 0.0156, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.21628498727735368, |
| "grad_norm": 0.010861819609999657, |
| "learning_rate": 3.4843087362171335e-05, |
| "loss": 0.0047, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.21882951653944022, |
| "grad_norm": 0.1913967728614807, |
| "learning_rate": 3.472999717274527e-05, |
| "loss": 0.019, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.22137404580152673, |
| "grad_norm": 0.03629165515303612, |
| "learning_rate": 3.46169069833192e-05, |
| "loss": 0.0036, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.22391857506361323, |
| "grad_norm": 0.031357087194919586, |
| "learning_rate": 3.450381679389313e-05, |
| "loss": 0.0112, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.22646310432569974, |
| "grad_norm": 0.7820755243301392, |
| "learning_rate": 3.4390726604467065e-05, |
| "loss": 0.0096, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.22900763358778625, |
| "grad_norm": 0.06621856242418289, |
| "learning_rate": 3.4277636415041e-05, |
| "loss": 0.008, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.23155216284987276, |
| "grad_norm": 0.28423774242401123, |
| "learning_rate": 3.416454622561493e-05, |
| "loss": 0.0124, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.2340966921119593, |
| "grad_norm": 0.10950585454702377, |
| "learning_rate": 3.405145603618886e-05, |
| "loss": 0.0088, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.2366412213740458, |
| "grad_norm": 0.05588839575648308, |
| "learning_rate": 3.3938365846762796e-05, |
| "loss": 0.0097, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.23918575063613232, |
| "grad_norm": 0.006185762584209442, |
| "learning_rate": 3.382527565733673e-05, |
| "loss": 0.0019, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.24173027989821882, |
| "grad_norm": 0.02274543233215809, |
| "learning_rate": 3.371218546791066e-05, |
| "loss": 0.0114, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.24427480916030533, |
| "grad_norm": 0.07948128134012222, |
| "learning_rate": 3.3599095278484593e-05, |
| "loss": 0.0021, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.24681933842239187, |
| "grad_norm": 0.29326194524765015, |
| "learning_rate": 3.3486005089058526e-05, |
| "loss": 0.0058, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.24936386768447838, |
| "grad_norm": 0.030000753700733185, |
| "learning_rate": 3.337291489963246e-05, |
| "loss": 0.0088, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.25190839694656486, |
| "grad_norm": 0.12489335238933563, |
| "learning_rate": 3.325982471020639e-05, |
| "loss": 0.0077, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.2544529262086514, |
| "grad_norm": 0.5825827717781067, |
| "learning_rate": 3.3146734520780324e-05, |
| "loss": 0.0073, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.2544529262086514, |
| "eval_loss": 0.03215046226978302, |
| "eval_runtime": 133.7287, |
| "eval_samples_per_second": 59.688, |
| "eval_steps_per_second": 0.471, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.25699745547073793, |
| "grad_norm": 0.07049743831157684, |
| "learning_rate": 3.3033644331354256e-05, |
| "loss": 0.0053, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.2595419847328244, |
| "grad_norm": 0.22097554802894592, |
| "learning_rate": 3.292055414192819e-05, |
| "loss": 0.0051, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.26208651399491095, |
| "grad_norm": 0.6384156942367554, |
| "learning_rate": 3.280746395250212e-05, |
| "loss": 0.0059, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.26463104325699743, |
| "grad_norm": 0.583540678024292, |
| "learning_rate": 3.2694373763076054e-05, |
| "loss": 0.0128, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.26717557251908397, |
| "grad_norm": 0.5119134783744812, |
| "learning_rate": 3.2581283573649987e-05, |
| "loss": 0.0041, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.2697201017811705, |
| "grad_norm": 0.45126932859420776, |
| "learning_rate": 3.246819338422392e-05, |
| "loss": 0.0033, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.272264631043257, |
| "grad_norm": 0.6944173574447632, |
| "learning_rate": 3.235510319479785e-05, |
| "loss": 0.0081, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.2748091603053435, |
| "grad_norm": 0.5973731875419617, |
| "learning_rate": 3.224201300537179e-05, |
| "loss": 0.0071, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.27735368956743, |
| "grad_norm": 0.7595233917236328, |
| "learning_rate": 3.212892281594572e-05, |
| "loss": 0.0085, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.27989821882951654, |
| "grad_norm": 0.040590643882751465, |
| "learning_rate": 3.201583262651965e-05, |
| "loss": 0.0116, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.2824427480916031, |
| "grad_norm": 0.0036058647092431784, |
| "learning_rate": 3.190274243709358e-05, |
| "loss": 0.005, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.28498727735368956, |
| "grad_norm": 0.9417976140975952, |
| "learning_rate": 3.178965224766752e-05, |
| "loss": 0.0183, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.2875318066157761, |
| "grad_norm": 0.03163948655128479, |
| "learning_rate": 3.1676562058241454e-05, |
| "loss": 0.0097, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.2900763358778626, |
| "grad_norm": 0.03139016404747963, |
| "learning_rate": 3.156347186881538e-05, |
| "loss": 0.0041, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.2926208651399491, |
| "grad_norm": 0.02722078561782837, |
| "learning_rate": 3.145038167938931e-05, |
| "loss": 0.0082, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.2951653944020356, |
| "grad_norm": 0.5288887023925781, |
| "learning_rate": 3.1337291489963245e-05, |
| "loss": 0.0051, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.29770992366412213, |
| "grad_norm": 0.9519974589347839, |
| "learning_rate": 3.1224201300537184e-05, |
| "loss": 0.0149, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.30025445292620867, |
| "grad_norm": 0.03469536080956459, |
| "learning_rate": 3.111111111111112e-05, |
| "loss": 0.0056, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.30279898218829515, |
| "grad_norm": 0.40900447964668274, |
| "learning_rate": 3.099802092168504e-05, |
| "loss": 0.0117, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.3053435114503817, |
| "grad_norm": 0.08984329551458359, |
| "learning_rate": 3.0884930732258975e-05, |
| "loss": 0.0046, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.30788804071246817, |
| "grad_norm": 0.007187999319285154, |
| "learning_rate": 3.0771840542832914e-05, |
| "loss": 0.0082, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.3104325699745547, |
| "grad_norm": 0.002739189425483346, |
| "learning_rate": 3.065875035340685e-05, |
| "loss": 0.0117, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.31297709923664124, |
| "grad_norm": 0.14508840441703796, |
| "learning_rate": 3.054566016398078e-05, |
| "loss": 0.0142, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.3155216284987277, |
| "grad_norm": 0.06479866057634354, |
| "learning_rate": 3.043256997455471e-05, |
| "loss": 0.004, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.31806615776081426, |
| "grad_norm": 0.07466994971036911, |
| "learning_rate": 3.031947978512864e-05, |
| "loss": 0.0063, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.32061068702290074, |
| "grad_norm": 0.02722453698515892, |
| "learning_rate": 3.0206389595702577e-05, |
| "loss": 0.021, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.3231552162849873, |
| "grad_norm": 0.06732615828514099, |
| "learning_rate": 3.009329940627651e-05, |
| "loss": 0.0057, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.3256997455470738, |
| "grad_norm": 0.036566682159900665, |
| "learning_rate": 2.998020921685044e-05, |
| "loss": 0.0032, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.3282442748091603, |
| "grad_norm": 0.2943013608455658, |
| "learning_rate": 2.986711902742437e-05, |
| "loss": 0.003, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.33078880407124683, |
| "grad_norm": 0.9214478135108948, |
| "learning_rate": 2.9754028837998307e-05, |
| "loss": 0.0069, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.3333333333333333, |
| "grad_norm": 0.19038018584251404, |
| "learning_rate": 2.964093864857224e-05, |
| "loss": 0.0102, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.33587786259541985, |
| "grad_norm": 0.03719209134578705, |
| "learning_rate": 2.9527848459146173e-05, |
| "loss": 0.0097, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.3384223918575064, |
| "grad_norm": 0.18294648826122284, |
| "learning_rate": 2.9414758269720102e-05, |
| "loss": 0.0028, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.34096692111959287, |
| "grad_norm": 0.6091200113296509, |
| "learning_rate": 2.9301668080294038e-05, |
| "loss": 0.0104, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.3435114503816794, |
| "grad_norm": 0.02828861214220524, |
| "learning_rate": 2.918857789086797e-05, |
| "loss": 0.0098, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.3460559796437659, |
| "grad_norm": 1.0778694152832031, |
| "learning_rate": 2.9075487701441903e-05, |
| "loss": 0.0145, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.3486005089058524, |
| "grad_norm": 0.036233462393283844, |
| "learning_rate": 2.8962397512015835e-05, |
| "loss": 0.0084, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.3511450381679389, |
| "grad_norm": 0.6519142985343933, |
| "learning_rate": 2.8849307322589765e-05, |
| "loss": 0.011, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.35368956743002544, |
| "grad_norm": 0.39109641313552856, |
| "learning_rate": 2.87362171331637e-05, |
| "loss": 0.0094, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.356234096692112, |
| "grad_norm": 0.08204642683267593, |
| "learning_rate": 2.8623126943737633e-05, |
| "loss": 0.0066, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.35877862595419846, |
| "grad_norm": 0.04721367731690407, |
| "learning_rate": 2.8510036754311566e-05, |
| "loss": 0.0093, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.361323155216285, |
| "grad_norm": 0.6993855834007263, |
| "learning_rate": 2.8396946564885498e-05, |
| "loss": 0.0103, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.3638676844783715, |
| "grad_norm": 0.04354240000247955, |
| "learning_rate": 2.8283856375459434e-05, |
| "loss": 0.0042, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.366412213740458, |
| "grad_norm": 0.03972458466887474, |
| "learning_rate": 2.8170766186033363e-05, |
| "loss": 0.0153, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.36895674300254455, |
| "grad_norm": 0.010820034891366959, |
| "learning_rate": 2.8057675996607296e-05, |
| "loss": 0.0118, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.37150127226463103, |
| "grad_norm": 0.034054119139909744, |
| "learning_rate": 2.794458580718123e-05, |
| "loss": 0.0096, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.37404580152671757, |
| "grad_norm": 0.28809288144111633, |
| "learning_rate": 2.783149561775516e-05, |
| "loss": 0.0028, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.37659033078880405, |
| "grad_norm": 0.15929700434207916, |
| "learning_rate": 2.7718405428329097e-05, |
| "loss": 0.0128, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.3791348600508906, |
| "grad_norm": 0.12857544422149658, |
| "learning_rate": 2.7605315238903026e-05, |
| "loss": 0.0075, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.3816793893129771, |
| "grad_norm": 0.021462570875883102, |
| "learning_rate": 2.749222504947696e-05, |
| "loss": 0.0124, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.3816793893129771, |
| "eval_loss": 0.029054569080471992, |
| "eval_runtime": 133.7839, |
| "eval_samples_per_second": 59.663, |
| "eval_steps_per_second": 0.471, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.3842239185750636, |
| "grad_norm": 0.942009687423706, |
| "learning_rate": 2.737913486005089e-05, |
| "loss": 0.0124, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.38676844783715014, |
| "grad_norm": 0.3137159049510956, |
| "learning_rate": 2.7266044670624827e-05, |
| "loss": 0.0033, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.3893129770992366, |
| "grad_norm": 0.18512937426567078, |
| "learning_rate": 2.715295448119876e-05, |
| "loss": 0.0048, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.39185750636132316, |
| "grad_norm": 0.07269980758428574, |
| "learning_rate": 2.703986429177269e-05, |
| "loss": 0.0035, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.3944020356234097, |
| "grad_norm": 0.33466291427612305, |
| "learning_rate": 2.692677410234662e-05, |
| "loss": 0.0066, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.3969465648854962, |
| "grad_norm": 0.5403372049331665, |
| "learning_rate": 2.6813683912920558e-05, |
| "loss": 0.0109, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.3994910941475827, |
| "grad_norm": 0.23673081398010254, |
| "learning_rate": 2.670059372349449e-05, |
| "loss": 0.0032, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.4020356234096692, |
| "grad_norm": 0.030269034206867218, |
| "learning_rate": 2.6587503534068423e-05, |
| "loss": 0.003, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.40458015267175573, |
| "grad_norm": 1.2594910860061646, |
| "learning_rate": 2.6474413344642352e-05, |
| "loss": 0.0087, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.4071246819338422, |
| "grad_norm": 0.014432943426072598, |
| "learning_rate": 2.6361323155216285e-05, |
| "loss": 0.0051, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.40966921119592875, |
| "grad_norm": 0.0029970400501042604, |
| "learning_rate": 2.624823296579022e-05, |
| "loss": 0.0017, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.4122137404580153, |
| "grad_norm": 0.08458627015352249, |
| "learning_rate": 2.6135142776364153e-05, |
| "loss": 0.0021, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.41475826972010177, |
| "grad_norm": 0.8725960850715637, |
| "learning_rate": 2.6022052586938086e-05, |
| "loss": 0.0053, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.4173027989821883, |
| "grad_norm": 0.7579461932182312, |
| "learning_rate": 2.5908962397512015e-05, |
| "loss": 0.0071, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.4198473282442748, |
| "grad_norm": 0.0022659231908619404, |
| "learning_rate": 2.5795872208085954e-05, |
| "loss": 0.003, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.4223918575063613, |
| "grad_norm": 0.04837888479232788, |
| "learning_rate": 2.5682782018659883e-05, |
| "loss": 0.014, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.42493638676844786, |
| "grad_norm": 0.006061100866645575, |
| "learning_rate": 2.5569691829233816e-05, |
| "loss": 0.0045, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.42748091603053434, |
| "grad_norm": 0.09335777908563614, |
| "learning_rate": 2.545660163980775e-05, |
| "loss": 0.007, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.4300254452926209, |
| "grad_norm": 0.25022462010383606, |
| "learning_rate": 2.5343511450381678e-05, |
| "loss": 0.0118, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.43256997455470736, |
| "grad_norm": 0.1105409562587738, |
| "learning_rate": 2.5230421260955617e-05, |
| "loss": 0.0082, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.4351145038167939, |
| "grad_norm": 0.6487683653831482, |
| "learning_rate": 2.5117331071529546e-05, |
| "loss": 0.0093, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.43765903307888043, |
| "grad_norm": 0.0011998299742117524, |
| "learning_rate": 2.500424088210348e-05, |
| "loss": 0.0064, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.4402035623409669, |
| "grad_norm": 0.8891687393188477, |
| "learning_rate": 2.489115069267741e-05, |
| "loss": 0.0089, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.44274809160305345, |
| "grad_norm": 0.03843465447425842, |
| "learning_rate": 2.4778060503251347e-05, |
| "loss": 0.0014, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.44529262086513993, |
| "grad_norm": 0.8782166242599487, |
| "learning_rate": 2.466497031382528e-05, |
| "loss": 0.0069, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.44783715012722647, |
| "grad_norm": 0.00530367624014616, |
| "learning_rate": 2.455188012439921e-05, |
| "loss": 0.0069, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.45038167938931295, |
| "grad_norm": 0.6024774312973022, |
| "learning_rate": 2.443878993497314e-05, |
| "loss": 0.0092, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.4529262086513995, |
| "grad_norm": 0.5995576977729797, |
| "learning_rate": 2.4325699745547078e-05, |
| "loss": 0.0153, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.455470737913486, |
| "grad_norm": 0.07373733073472977, |
| "learning_rate": 2.421260955612101e-05, |
| "loss": 0.0071, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.4580152671755725, |
| "grad_norm": 0.5337245464324951, |
| "learning_rate": 2.4099519366694943e-05, |
| "loss": 0.0085, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.46055979643765904, |
| "grad_norm": 0.10243307799100876, |
| "learning_rate": 2.3986429177268872e-05, |
| "loss": 0.0081, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.4631043256997455, |
| "grad_norm": 0.0933896005153656, |
| "learning_rate": 2.3873338987842804e-05, |
| "loss": 0.0042, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.46564885496183206, |
| "grad_norm": 0.02155870944261551, |
| "learning_rate": 2.376024879841674e-05, |
| "loss": 0.0085, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.4681933842239186, |
| "grad_norm": 0.018530339002609253, |
| "learning_rate": 2.3647158608990673e-05, |
| "loss": 0.0061, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.4707379134860051, |
| "grad_norm": 0.37336698174476624, |
| "learning_rate": 2.3534068419564605e-05, |
| "loss": 0.0062, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.4732824427480916, |
| "grad_norm": 0.45383787155151367, |
| "learning_rate": 2.3420978230138535e-05, |
| "loss": 0.0138, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.4758269720101781, |
| "grad_norm": 0.007181716617196798, |
| "learning_rate": 2.330788804071247e-05, |
| "loss": 0.0081, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.47837150127226463, |
| "grad_norm": 0.24207919836044312, |
| "learning_rate": 2.3194797851286403e-05, |
| "loss": 0.0043, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.48091603053435117, |
| "grad_norm": 0.9251128435134888, |
| "learning_rate": 2.3081707661860336e-05, |
| "loss": 0.0112, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.48346055979643765, |
| "grad_norm": 0.15212811529636383, |
| "learning_rate": 2.296861747243427e-05, |
| "loss": 0.0073, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.4860050890585242, |
| "grad_norm": 0.022100647911429405, |
| "learning_rate": 2.2855527283008204e-05, |
| "loss": 0.0077, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.48854961832061067, |
| "grad_norm": 0.3646630644798279, |
| "learning_rate": 2.2742437093582133e-05, |
| "loss": 0.0038, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.4910941475826972, |
| "grad_norm": 1.1903932094573975, |
| "learning_rate": 2.2629346904156066e-05, |
| "loss": 0.0077, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.49363867684478374, |
| "grad_norm": 0.4780580997467041, |
| "learning_rate": 2.251625671473e-05, |
| "loss": 0.0041, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.4961832061068702, |
| "grad_norm": 0.00539785111322999, |
| "learning_rate": 2.240316652530393e-05, |
| "loss": 0.008, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.49872773536895676, |
| "grad_norm": 0.3662808835506439, |
| "learning_rate": 2.2290076335877867e-05, |
| "loss": 0.0045, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.5012722646310432, |
| "grad_norm": 0.7225646376609802, |
| "learning_rate": 2.2176986146451796e-05, |
| "loss": 0.0071, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.5038167938931297, |
| "grad_norm": 0.2498391717672348, |
| "learning_rate": 2.206389595702573e-05, |
| "loss": 0.0059, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.5063613231552163, |
| "grad_norm": 0.3068143129348755, |
| "learning_rate": 2.195080576759966e-05, |
| "loss": 0.0039, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.5089058524173028, |
| "grad_norm": 0.09063991904258728, |
| "learning_rate": 2.1837715578173597e-05, |
| "loss": 0.0021, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.5089058524173028, |
| "eval_loss": 0.026657788082957268, |
| "eval_runtime": 133.8684, |
| "eval_samples_per_second": 59.626, |
| "eval_steps_per_second": 0.471, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.5114503816793893, |
| "grad_norm": 0.008128073066473007, |
| "learning_rate": 2.172462538874753e-05, |
| "loss": 0.0113, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.5139949109414759, |
| "grad_norm": 0.5956616401672363, |
| "learning_rate": 2.161153519932146e-05, |
| "loss": 0.0062, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.5165394402035624, |
| "grad_norm": 0.1394854635000229, |
| "learning_rate": 2.1498445009895392e-05, |
| "loss": 0.005, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.5190839694656488, |
| "grad_norm": 0.636408269405365, |
| "learning_rate": 2.1385354820469324e-05, |
| "loss": 0.0098, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.5216284987277354, |
| "grad_norm": 0.33946555852890015, |
| "learning_rate": 2.127226463104326e-05, |
| "loss": 0.0108, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.5241730279898219, |
| "grad_norm": 0.8185147643089294, |
| "learning_rate": 2.1159174441617193e-05, |
| "loss": 0.0085, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.5267175572519084, |
| "grad_norm": 0.20138558745384216, |
| "learning_rate": 2.1046084252191122e-05, |
| "loss": 0.011, |
| "step": 2070 |
| }, |
| { |
| "epoch": 0.5292620865139949, |
| "grad_norm": 0.738299548625946, |
| "learning_rate": 2.0932994062765055e-05, |
| "loss": 0.0125, |
| "step": 2080 |
| }, |
| { |
| "epoch": 0.5318066157760815, |
| "grad_norm": 0.007991126738488674, |
| "learning_rate": 2.081990387333899e-05, |
| "loss": 0.009, |
| "step": 2090 |
| }, |
| { |
| "epoch": 0.5343511450381679, |
| "grad_norm": 0.864669919013977, |
| "learning_rate": 2.0706813683912923e-05, |
| "loss": 0.0114, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.5368956743002544, |
| "grad_norm": 0.07526708394289017, |
| "learning_rate": 2.0593723494486856e-05, |
| "loss": 0.0109, |
| "step": 2110 |
| }, |
| { |
| "epoch": 0.539440203562341, |
| "grad_norm": 0.14376136660575867, |
| "learning_rate": 2.0480633305060785e-05, |
| "loss": 0.007, |
| "step": 2120 |
| }, |
| { |
| "epoch": 0.5419847328244275, |
| "grad_norm": 0.03273206949234009, |
| "learning_rate": 2.0367543115634724e-05, |
| "loss": 0.0058, |
| "step": 2130 |
| }, |
| { |
| "epoch": 0.544529262086514, |
| "grad_norm": 0.0066210562363266945, |
| "learning_rate": 2.0254452926208653e-05, |
| "loss": 0.0116, |
| "step": 2140 |
| }, |
| { |
| "epoch": 0.5470737913486005, |
| "grad_norm": 0.11024600267410278, |
| "learning_rate": 2.0141362736782586e-05, |
| "loss": 0.0106, |
| "step": 2150 |
| }, |
| { |
| "epoch": 0.549618320610687, |
| "grad_norm": 0.43245652318000793, |
| "learning_rate": 2.002827254735652e-05, |
| "loss": 0.0051, |
| "step": 2160 |
| }, |
| { |
| "epoch": 0.5521628498727735, |
| "grad_norm": 0.19758453965187073, |
| "learning_rate": 1.991518235793045e-05, |
| "loss": 0.0051, |
| "step": 2170 |
| }, |
| { |
| "epoch": 0.55470737913486, |
| "grad_norm": 0.7388942241668701, |
| "learning_rate": 1.9802092168504384e-05, |
| "loss": 0.0073, |
| "step": 2180 |
| }, |
| { |
| "epoch": 0.5572519083969466, |
| "grad_norm": 0.021665601059794426, |
| "learning_rate": 1.9689001979078316e-05, |
| "loss": 0.0046, |
| "step": 2190 |
| }, |
| { |
| "epoch": 0.5597964376590331, |
| "grad_norm": 0.003727781120687723, |
| "learning_rate": 1.957591178965225e-05, |
| "loss": 0.0125, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.5623409669211196, |
| "grad_norm": 0.42983368039131165, |
| "learning_rate": 1.946282160022618e-05, |
| "loss": 0.0031, |
| "step": 2210 |
| }, |
| { |
| "epoch": 0.5648854961832062, |
| "grad_norm": 0.5328766703605652, |
| "learning_rate": 1.9349731410800114e-05, |
| "loss": 0.0103, |
| "step": 2220 |
| }, |
| { |
| "epoch": 0.5674300254452926, |
| "grad_norm": 0.9336711764335632, |
| "learning_rate": 1.923664122137405e-05, |
| "loss": 0.0171, |
| "step": 2230 |
| }, |
| { |
| "epoch": 0.5699745547073791, |
| "grad_norm": 0.04398497939109802, |
| "learning_rate": 1.912355103194798e-05, |
| "loss": 0.0021, |
| "step": 2240 |
| }, |
| { |
| "epoch": 0.5725190839694656, |
| "grad_norm": 0.39377424120903015, |
| "learning_rate": 1.901046084252191e-05, |
| "loss": 0.0104, |
| "step": 2250 |
| }, |
| { |
| "epoch": 0.5750636132315522, |
| "grad_norm": 0.028455466032028198, |
| "learning_rate": 1.8897370653095844e-05, |
| "loss": 0.0053, |
| "step": 2260 |
| }, |
| { |
| "epoch": 0.5776081424936387, |
| "grad_norm": 0.2810778021812439, |
| "learning_rate": 1.8784280463669777e-05, |
| "loss": 0.0071, |
| "step": 2270 |
| }, |
| { |
| "epoch": 0.5801526717557252, |
| "grad_norm": 0.5420275330543518, |
| "learning_rate": 1.8671190274243713e-05, |
| "loss": 0.0075, |
| "step": 2280 |
| }, |
| { |
| "epoch": 0.5826972010178118, |
| "grad_norm": 0.7387365102767944, |
| "learning_rate": 1.8558100084817642e-05, |
| "loss": 0.0067, |
| "step": 2290 |
| }, |
| { |
| "epoch": 0.5852417302798982, |
| "grad_norm": 0.006927556823939085, |
| "learning_rate": 1.8445009895391578e-05, |
| "loss": 0.0008, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.5877862595419847, |
| "grad_norm": 0.5698086023330688, |
| "learning_rate": 1.8331919705965507e-05, |
| "loss": 0.0029, |
| "step": 2310 |
| }, |
| { |
| "epoch": 0.5903307888040712, |
| "grad_norm": 0.10498206317424774, |
| "learning_rate": 1.8218829516539443e-05, |
| "loss": 0.0121, |
| "step": 2320 |
| }, |
| { |
| "epoch": 0.5928753180661578, |
| "grad_norm": 0.05598974600434303, |
| "learning_rate": 1.8105739327113376e-05, |
| "loss": 0.0075, |
| "step": 2330 |
| }, |
| { |
| "epoch": 0.5954198473282443, |
| "grad_norm": 0.11475464701652527, |
| "learning_rate": 1.7992649137687308e-05, |
| "loss": 0.004, |
| "step": 2340 |
| }, |
| { |
| "epoch": 0.5979643765903307, |
| "grad_norm": 0.0038927500136196613, |
| "learning_rate": 1.787955894826124e-05, |
| "loss": 0.0087, |
| "step": 2350 |
| }, |
| { |
| "epoch": 0.6005089058524173, |
| "grad_norm": 0.4016096293926239, |
| "learning_rate": 1.776646875883517e-05, |
| "loss": 0.0037, |
| "step": 2360 |
| }, |
| { |
| "epoch": 0.6030534351145038, |
| "grad_norm": 0.13292540609836578, |
| "learning_rate": 1.7653378569409106e-05, |
| "loss": 0.0041, |
| "step": 2370 |
| }, |
| { |
| "epoch": 0.6055979643765903, |
| "grad_norm": 0.01060889195650816, |
| "learning_rate": 1.754028837998304e-05, |
| "loss": 0.0075, |
| "step": 2380 |
| }, |
| { |
| "epoch": 0.6081424936386769, |
| "grad_norm": 0.39646655321121216, |
| "learning_rate": 1.742719819055697e-05, |
| "loss": 0.008, |
| "step": 2390 |
| }, |
| { |
| "epoch": 0.6106870229007634, |
| "grad_norm": 0.14969764649868011, |
| "learning_rate": 1.7314108001130904e-05, |
| "loss": 0.0141, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.6132315521628499, |
| "grad_norm": 0.0635710060596466, |
| "learning_rate": 1.7201017811704836e-05, |
| "loss": 0.0069, |
| "step": 2410 |
| }, |
| { |
| "epoch": 0.6157760814249363, |
| "grad_norm": 0.48808881640434265, |
| "learning_rate": 1.708792762227877e-05, |
| "loss": 0.0111, |
| "step": 2420 |
| }, |
| { |
| "epoch": 0.6183206106870229, |
| "grad_norm": 0.06506534665822983, |
| "learning_rate": 1.69748374328527e-05, |
| "loss": 0.0025, |
| "step": 2430 |
| }, |
| { |
| "epoch": 0.6208651399491094, |
| "grad_norm": 0.020378531888127327, |
| "learning_rate": 1.6861747243426634e-05, |
| "loss": 0.0093, |
| "step": 2440 |
| }, |
| { |
| "epoch": 0.6234096692111959, |
| "grad_norm": 0.18914295732975006, |
| "learning_rate": 1.6748657054000566e-05, |
| "loss": 0.0048, |
| "step": 2450 |
| }, |
| { |
| "epoch": 0.6259541984732825, |
| "grad_norm": 0.6544604301452637, |
| "learning_rate": 1.66355668645745e-05, |
| "loss": 0.0128, |
| "step": 2460 |
| }, |
| { |
| "epoch": 0.628498727735369, |
| "grad_norm": 0.1411842703819275, |
| "learning_rate": 1.652247667514843e-05, |
| "loss": 0.0031, |
| "step": 2470 |
| }, |
| { |
| "epoch": 0.6310432569974554, |
| "grad_norm": 0.4283600449562073, |
| "learning_rate": 1.6409386485722364e-05, |
| "loss": 0.019, |
| "step": 2480 |
| }, |
| { |
| "epoch": 0.6335877862595419, |
| "grad_norm": 0.017621824517846107, |
| "learning_rate": 1.6296296296296297e-05, |
| "loss": 0.0105, |
| "step": 2490 |
| }, |
| { |
| "epoch": 0.6361323155216285, |
| "grad_norm": 0.6252580285072327, |
| "learning_rate": 1.618320610687023e-05, |
| "loss": 0.0059, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.6361323155216285, |
| "eval_loss": 0.027058852836489677, |
| "eval_runtime": 133.9207, |
| "eval_samples_per_second": 59.602, |
| "eval_steps_per_second": 0.47, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.638676844783715, |
| "grad_norm": 0.6191806197166443, |
| "learning_rate": 1.6070115917444162e-05, |
| "loss": 0.0102, |
| "step": 2510 |
| }, |
| { |
| "epoch": 0.6412213740458015, |
| "grad_norm": 0.5098242163658142, |
| "learning_rate": 1.5957025728018098e-05, |
| "loss": 0.0085, |
| "step": 2520 |
| }, |
| { |
| "epoch": 0.6437659033078881, |
| "grad_norm": 0.3731755018234253, |
| "learning_rate": 1.5843935538592027e-05, |
| "loss": 0.0104, |
| "step": 2530 |
| }, |
| { |
| "epoch": 0.6463104325699746, |
| "grad_norm": 0.002762430114671588, |
| "learning_rate": 1.5730845349165963e-05, |
| "loss": 0.0059, |
| "step": 2540 |
| }, |
| { |
| "epoch": 0.648854961832061, |
| "grad_norm": 0.6603003144264221, |
| "learning_rate": 1.5617755159739892e-05, |
| "loss": 0.01, |
| "step": 2550 |
| }, |
| { |
| "epoch": 0.6513994910941476, |
| "grad_norm": 0.3234529197216034, |
| "learning_rate": 1.5504664970313828e-05, |
| "loss": 0.0225, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.6539440203562341, |
| "grad_norm": 0.37260857224464417, |
| "learning_rate": 1.539157478088776e-05, |
| "loss": 0.0104, |
| "step": 2570 |
| }, |
| { |
| "epoch": 0.6564885496183206, |
| "grad_norm": 0.03868274390697479, |
| "learning_rate": 1.5278484591461693e-05, |
| "loss": 0.0099, |
| "step": 2580 |
| }, |
| { |
| "epoch": 0.6590330788804071, |
| "grad_norm": 0.6250960826873779, |
| "learning_rate": 1.5165394402035624e-05, |
| "loss": 0.0138, |
| "step": 2590 |
| }, |
| { |
| "epoch": 0.6615776081424937, |
| "grad_norm": 0.04367992654442787, |
| "learning_rate": 1.5052304212609557e-05, |
| "loss": 0.0036, |
| "step": 2600 |
| }, |
| { |
| "epoch": 0.6641221374045801, |
| "grad_norm": 0.021459773182868958, |
| "learning_rate": 1.493921402318349e-05, |
| "loss": 0.0099, |
| "step": 2610 |
| }, |
| { |
| "epoch": 0.6666666666666666, |
| "grad_norm": 0.010171366855502129, |
| "learning_rate": 1.4826123833757422e-05, |
| "loss": 0.0054, |
| "step": 2620 |
| }, |
| { |
| "epoch": 0.6692111959287532, |
| "grad_norm": 0.01782556250691414, |
| "learning_rate": 1.4713033644331356e-05, |
| "loss": 0.0033, |
| "step": 2630 |
| }, |
| { |
| "epoch": 0.6717557251908397, |
| "grad_norm": 0.34946757555007935, |
| "learning_rate": 1.4599943454905287e-05, |
| "loss": 0.0057, |
| "step": 2640 |
| }, |
| { |
| "epoch": 0.6743002544529262, |
| "grad_norm": 0.00784808024764061, |
| "learning_rate": 1.4486853265479221e-05, |
| "loss": 0.0031, |
| "step": 2650 |
| }, |
| { |
| "epoch": 0.6768447837150128, |
| "grad_norm": 0.539448082447052, |
| "learning_rate": 1.4373763076053154e-05, |
| "loss": 0.0036, |
| "step": 2660 |
| }, |
| { |
| "epoch": 0.6793893129770993, |
| "grad_norm": 0.595289945602417, |
| "learning_rate": 1.4260672886627088e-05, |
| "loss": 0.0111, |
| "step": 2670 |
| }, |
| { |
| "epoch": 0.6819338422391857, |
| "grad_norm": 0.7016165256500244, |
| "learning_rate": 1.4147582697201019e-05, |
| "loss": 0.0263, |
| "step": 2680 |
| }, |
| { |
| "epoch": 0.6844783715012722, |
| "grad_norm": 1.089051365852356, |
| "learning_rate": 1.4034492507774953e-05, |
| "loss": 0.0174, |
| "step": 2690 |
| }, |
| { |
| "epoch": 0.6870229007633588, |
| "grad_norm": 1.062965989112854, |
| "learning_rate": 1.3921402318348884e-05, |
| "loss": 0.0059, |
| "step": 2700 |
| }, |
| { |
| "epoch": 0.6895674300254453, |
| "grad_norm": 1.098395586013794, |
| "learning_rate": 1.3808312128922817e-05, |
| "loss": 0.0071, |
| "step": 2710 |
| }, |
| { |
| "epoch": 0.6921119592875318, |
| "grad_norm": 0.02575494349002838, |
| "learning_rate": 1.369522193949675e-05, |
| "loss": 0.0099, |
| "step": 2720 |
| }, |
| { |
| "epoch": 0.6946564885496184, |
| "grad_norm": 0.20344482362270355, |
| "learning_rate": 1.3582131750070682e-05, |
| "loss": 0.0115, |
| "step": 2730 |
| }, |
| { |
| "epoch": 0.6972010178117048, |
| "grad_norm": 0.10060387849807739, |
| "learning_rate": 1.3469041560644616e-05, |
| "loss": 0.0112, |
| "step": 2740 |
| }, |
| { |
| "epoch": 0.6997455470737913, |
| "grad_norm": 0.43116071820259094, |
| "learning_rate": 1.3355951371218547e-05, |
| "loss": 0.0047, |
| "step": 2750 |
| }, |
| { |
| "epoch": 0.7022900763358778, |
| "grad_norm": 0.22251801192760468, |
| "learning_rate": 1.3242861181792481e-05, |
| "loss": 0.0043, |
| "step": 2760 |
| }, |
| { |
| "epoch": 0.7048346055979644, |
| "grad_norm": 0.6789427995681763, |
| "learning_rate": 1.3129770992366414e-05, |
| "loss": 0.006, |
| "step": 2770 |
| }, |
| { |
| "epoch": 0.7073791348600509, |
| "grad_norm": 0.01072303019464016, |
| "learning_rate": 1.3016680802940346e-05, |
| "loss": 0.0008, |
| "step": 2780 |
| }, |
| { |
| "epoch": 0.7099236641221374, |
| "grad_norm": 0.0031219993252307177, |
| "learning_rate": 1.2903590613514279e-05, |
| "loss": 0.0038, |
| "step": 2790 |
| }, |
| { |
| "epoch": 0.712468193384224, |
| "grad_norm": 0.2788158059120178, |
| "learning_rate": 1.2790500424088213e-05, |
| "loss": 0.0048, |
| "step": 2800 |
| }, |
| { |
| "epoch": 0.7150127226463104, |
| "grad_norm": 0.011245734058320522, |
| "learning_rate": 1.2677410234662144e-05, |
| "loss": 0.0048, |
| "step": 2810 |
| }, |
| { |
| "epoch": 0.7175572519083969, |
| "grad_norm": 0.6008136868476868, |
| "learning_rate": 1.2564320045236076e-05, |
| "loss": 0.0062, |
| "step": 2820 |
| }, |
| { |
| "epoch": 0.7201017811704835, |
| "grad_norm": 0.027553007006645203, |
| "learning_rate": 1.2451229855810009e-05, |
| "loss": 0.0014, |
| "step": 2830 |
| }, |
| { |
| "epoch": 0.72264631043257, |
| "grad_norm": 0.011731176637113094, |
| "learning_rate": 1.2338139666383942e-05, |
| "loss": 0.0128, |
| "step": 2840 |
| }, |
| { |
| "epoch": 0.7251908396946565, |
| "grad_norm": 0.0019871287513524294, |
| "learning_rate": 1.2225049476957876e-05, |
| "loss": 0.0043, |
| "step": 2850 |
| }, |
| { |
| "epoch": 0.727735368956743, |
| "grad_norm": 0.2408047765493393, |
| "learning_rate": 1.2111959287531807e-05, |
| "loss": 0.0153, |
| "step": 2860 |
| }, |
| { |
| "epoch": 0.7302798982188295, |
| "grad_norm": 0.3186394274234772, |
| "learning_rate": 1.1998869098105741e-05, |
| "loss": 0.0012, |
| "step": 2870 |
| }, |
| { |
| "epoch": 0.732824427480916, |
| "grad_norm": 1.3726838827133179, |
| "learning_rate": 1.1885778908679672e-05, |
| "loss": 0.0058, |
| "step": 2880 |
| }, |
| { |
| "epoch": 0.7353689567430025, |
| "grad_norm": 0.8535423874855042, |
| "learning_rate": 1.1772688719253606e-05, |
| "loss": 0.0091, |
| "step": 2890 |
| }, |
| { |
| "epoch": 0.7379134860050891, |
| "grad_norm": 0.3728189170360565, |
| "learning_rate": 1.1659598529827539e-05, |
| "loss": 0.0107, |
| "step": 2900 |
| }, |
| { |
| "epoch": 0.7404580152671756, |
| "grad_norm": 0.02934439294040203, |
| "learning_rate": 1.1546508340401473e-05, |
| "loss": 0.0144, |
| "step": 2910 |
| }, |
| { |
| "epoch": 0.7430025445292621, |
| "grad_norm": 0.051281895488500595, |
| "learning_rate": 1.1433418150975404e-05, |
| "loss": 0.0038, |
| "step": 2920 |
| }, |
| { |
| "epoch": 0.7455470737913485, |
| "grad_norm": 0.04496416449546814, |
| "learning_rate": 1.1320327961549336e-05, |
| "loss": 0.002, |
| "step": 2930 |
| }, |
| { |
| "epoch": 0.7480916030534351, |
| "grad_norm": 1.223193645477295, |
| "learning_rate": 1.1207237772123269e-05, |
| "loss": 0.0078, |
| "step": 2940 |
| }, |
| { |
| "epoch": 0.7506361323155216, |
| "grad_norm": 0.4533013701438904, |
| "learning_rate": 1.1094147582697202e-05, |
| "loss": 0.0107, |
| "step": 2950 |
| }, |
| { |
| "epoch": 0.7531806615776081, |
| "grad_norm": 0.4159058928489685, |
| "learning_rate": 1.0981057393271136e-05, |
| "loss": 0.0049, |
| "step": 2960 |
| }, |
| { |
| "epoch": 0.7557251908396947, |
| "grad_norm": 0.6587101817131042, |
| "learning_rate": 1.0867967203845067e-05, |
| "loss": 0.0062, |
| "step": 2970 |
| }, |
| { |
| "epoch": 0.7582697201017812, |
| "grad_norm": 0.19408226013183594, |
| "learning_rate": 1.0754877014419001e-05, |
| "loss": 0.0065, |
| "step": 2980 |
| }, |
| { |
| "epoch": 0.7608142493638677, |
| "grad_norm": 0.12487910687923431, |
| "learning_rate": 1.0641786824992932e-05, |
| "loss": 0.0032, |
| "step": 2990 |
| }, |
| { |
| "epoch": 0.7633587786259542, |
| "grad_norm": 0.7133147120475769, |
| "learning_rate": 1.0528696635566866e-05, |
| "loss": 0.0075, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.7633587786259542, |
| "eval_loss": 0.02747577615082264, |
| "eval_runtime": 133.892, |
| "eval_samples_per_second": 59.615, |
| "eval_steps_per_second": 0.471, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.7659033078880407, |
| "grad_norm": 0.05647735670208931, |
| "learning_rate": 1.0415606446140799e-05, |
| "loss": 0.0067, |
| "step": 3010 |
| }, |
| { |
| "epoch": 0.7684478371501272, |
| "grad_norm": 0.06330792605876923, |
| "learning_rate": 1.0302516256714731e-05, |
| "loss": 0.004, |
| "step": 3020 |
| }, |
| { |
| "epoch": 0.7709923664122137, |
| "grad_norm": 1.3889905214309692, |
| "learning_rate": 1.0189426067288664e-05, |
| "loss": 0.0094, |
| "step": 3030 |
| }, |
| { |
| "epoch": 0.7735368956743003, |
| "grad_norm": 0.34020090103149414, |
| "learning_rate": 1.0076335877862595e-05, |
| "loss": 0.0052, |
| "step": 3040 |
| }, |
| { |
| "epoch": 0.7760814249363868, |
| "grad_norm": 0.05108115077018738, |
| "learning_rate": 9.963245688436529e-06, |
| "loss": 0.0022, |
| "step": 3050 |
| }, |
| { |
| "epoch": 0.7786259541984732, |
| "grad_norm": 0.8076844811439514, |
| "learning_rate": 9.850155499010461e-06, |
| "loss": 0.0112, |
| "step": 3060 |
| }, |
| { |
| "epoch": 0.7811704834605598, |
| "grad_norm": 0.09561960399150848, |
| "learning_rate": 9.737065309584394e-06, |
| "loss": 0.0182, |
| "step": 3070 |
| }, |
| { |
| "epoch": 0.7837150127226463, |
| "grad_norm": 0.2492062747478485, |
| "learning_rate": 9.623975120158328e-06, |
| "loss": 0.0049, |
| "step": 3080 |
| }, |
| { |
| "epoch": 0.7862595419847328, |
| "grad_norm": 0.007754405960440636, |
| "learning_rate": 9.51088493073226e-06, |
| "loss": 0.006, |
| "step": 3090 |
| }, |
| { |
| "epoch": 0.7888040712468194, |
| "grad_norm": 0.21519310772418976, |
| "learning_rate": 9.397794741306192e-06, |
| "loss": 0.006, |
| "step": 3100 |
| }, |
| { |
| "epoch": 0.7913486005089059, |
| "grad_norm": 0.09309302270412445, |
| "learning_rate": 9.284704551880124e-06, |
| "loss": 0.0084, |
| "step": 3110 |
| }, |
| { |
| "epoch": 0.7938931297709924, |
| "grad_norm": 0.04294523224234581, |
| "learning_rate": 9.171614362454057e-06, |
| "loss": 0.0041, |
| "step": 3120 |
| }, |
| { |
| "epoch": 0.7964376590330788, |
| "grad_norm": 0.004443097859621048, |
| "learning_rate": 9.058524173027991e-06, |
| "loss": 0.0115, |
| "step": 3130 |
| }, |
| { |
| "epoch": 0.7989821882951654, |
| "grad_norm": 0.1392659693956375, |
| "learning_rate": 8.945433983601924e-06, |
| "loss": 0.0074, |
| "step": 3140 |
| }, |
| { |
| "epoch": 0.8015267175572519, |
| "grad_norm": 0.02126116305589676, |
| "learning_rate": 8.832343794175856e-06, |
| "loss": 0.0056, |
| "step": 3150 |
| }, |
| { |
| "epoch": 0.8040712468193384, |
| "grad_norm": 0.7408782839775085, |
| "learning_rate": 8.719253604749789e-06, |
| "loss": 0.0134, |
| "step": 3160 |
| }, |
| { |
| "epoch": 0.806615776081425, |
| "grad_norm": 0.0540887750685215, |
| "learning_rate": 8.606163415323721e-06, |
| "loss": 0.0089, |
| "step": 3170 |
| }, |
| { |
| "epoch": 0.8091603053435115, |
| "grad_norm": 0.38249894976615906, |
| "learning_rate": 8.493073225897654e-06, |
| "loss": 0.0077, |
| "step": 3180 |
| }, |
| { |
| "epoch": 0.811704834605598, |
| "grad_norm": 0.04060674458742142, |
| "learning_rate": 8.379983036471587e-06, |
| "loss": 0.0121, |
| "step": 3190 |
| }, |
| { |
| "epoch": 0.8142493638676844, |
| "grad_norm": 0.8929535150527954, |
| "learning_rate": 8.266892847045519e-06, |
| "loss": 0.0327, |
| "step": 3200 |
| }, |
| { |
| "epoch": 0.816793893129771, |
| "grad_norm": 0.029025282710790634, |
| "learning_rate": 8.153802657619452e-06, |
| "loss": 0.0014, |
| "step": 3210 |
| }, |
| { |
| "epoch": 0.8193384223918575, |
| "grad_norm": 0.08096902072429657, |
| "learning_rate": 8.040712468193384e-06, |
| "loss": 0.0016, |
| "step": 3220 |
| }, |
| { |
| "epoch": 0.821882951653944, |
| "grad_norm": 0.23578789830207825, |
| "learning_rate": 7.927622278767317e-06, |
| "loss": 0.0117, |
| "step": 3230 |
| }, |
| { |
| "epoch": 0.8244274809160306, |
| "grad_norm": 0.06997015327215195, |
| "learning_rate": 7.81453208934125e-06, |
| "loss": 0.0061, |
| "step": 3240 |
| }, |
| { |
| "epoch": 0.8269720101781171, |
| "grad_norm": 0.8557425141334534, |
| "learning_rate": 7.701441899915184e-06, |
| "loss": 0.0064, |
| "step": 3250 |
| }, |
| { |
| "epoch": 0.8295165394402035, |
| "grad_norm": 0.5004100203514099, |
| "learning_rate": 7.588351710489115e-06, |
| "loss": 0.003, |
| "step": 3260 |
| }, |
| { |
| "epoch": 0.8320610687022901, |
| "grad_norm": 0.5203502774238586, |
| "learning_rate": 7.475261521063049e-06, |
| "loss": 0.0053, |
| "step": 3270 |
| }, |
| { |
| "epoch": 0.8346055979643766, |
| "grad_norm": 0.7414500713348389, |
| "learning_rate": 7.362171331636981e-06, |
| "loss": 0.0136, |
| "step": 3280 |
| }, |
| { |
| "epoch": 0.8371501272264631, |
| "grad_norm": 0.27280735969543457, |
| "learning_rate": 7.249081142210914e-06, |
| "loss": 0.0063, |
| "step": 3290 |
| }, |
| { |
| "epoch": 0.8396946564885496, |
| "grad_norm": 0.6289187073707581, |
| "learning_rate": 7.135990952784847e-06, |
| "loss": 0.0115, |
| "step": 3300 |
| }, |
| { |
| "epoch": 0.8422391857506362, |
| "grad_norm": 0.7311071157455444, |
| "learning_rate": 7.022900763358779e-06, |
| "loss": 0.0022, |
| "step": 3310 |
| }, |
| { |
| "epoch": 0.8447837150127226, |
| "grad_norm": 0.13066644966602325, |
| "learning_rate": 6.909810573932712e-06, |
| "loss": 0.0071, |
| "step": 3320 |
| }, |
| { |
| "epoch": 0.8473282442748091, |
| "grad_norm": 0.1883714348077774, |
| "learning_rate": 6.796720384506644e-06, |
| "loss": 0.005, |
| "step": 3330 |
| }, |
| { |
| "epoch": 0.8498727735368957, |
| "grad_norm": 0.03761894628405571, |
| "learning_rate": 6.683630195080577e-06, |
| "loss": 0.0122, |
| "step": 3340 |
| }, |
| { |
| "epoch": 0.8524173027989822, |
| "grad_norm": 0.6123657822608948, |
| "learning_rate": 6.57054000565451e-06, |
| "loss": 0.005, |
| "step": 3350 |
| }, |
| { |
| "epoch": 0.8549618320610687, |
| "grad_norm": 0.3409605920314789, |
| "learning_rate": 6.457449816228443e-06, |
| "loss": 0.0144, |
| "step": 3360 |
| }, |
| { |
| "epoch": 0.8575063613231552, |
| "grad_norm": 0.13868448138237, |
| "learning_rate": 6.344359626802375e-06, |
| "loss": 0.0066, |
| "step": 3370 |
| }, |
| { |
| "epoch": 0.8600508905852418, |
| "grad_norm": 0.28151288628578186, |
| "learning_rate": 6.231269437376308e-06, |
| "loss": 0.0073, |
| "step": 3380 |
| }, |
| { |
| "epoch": 0.8625954198473282, |
| "grad_norm": 0.024988099932670593, |
| "learning_rate": 6.118179247950241e-06, |
| "loss": 0.0032, |
| "step": 3390 |
| }, |
| { |
| "epoch": 0.8651399491094147, |
| "grad_norm": 0.07107000797986984, |
| "learning_rate": 6.005089058524174e-06, |
| "loss": 0.0049, |
| "step": 3400 |
| }, |
| { |
| "epoch": 0.8676844783715013, |
| "grad_norm": 0.6919849514961243, |
| "learning_rate": 5.8919988690981064e-06, |
| "loss": 0.0125, |
| "step": 3410 |
| }, |
| { |
| "epoch": 0.8702290076335878, |
| "grad_norm": 0.031378187239170074, |
| "learning_rate": 5.778908679672038e-06, |
| "loss": 0.0037, |
| "step": 3420 |
| }, |
| { |
| "epoch": 0.8727735368956743, |
| "grad_norm": 0.3620636463165283, |
| "learning_rate": 5.6658184902459716e-06, |
| "loss": 0.0034, |
| "step": 3430 |
| }, |
| { |
| "epoch": 0.8753180661577609, |
| "grad_norm": 0.0069023617543280125, |
| "learning_rate": 5.552728300819904e-06, |
| "loss": 0.0031, |
| "step": 3440 |
| }, |
| { |
| "epoch": 0.8778625954198473, |
| "grad_norm": 0.007257466204464436, |
| "learning_rate": 5.439638111393837e-06, |
| "loss": 0.0108, |
| "step": 3450 |
| }, |
| { |
| "epoch": 0.8804071246819338, |
| "grad_norm": 0.0007503525703214109, |
| "learning_rate": 5.326547921967769e-06, |
| "loss": 0.006, |
| "step": 3460 |
| }, |
| { |
| "epoch": 0.8829516539440203, |
| "grad_norm": 0.06278515607118607, |
| "learning_rate": 5.213457732541703e-06, |
| "loss": 0.0019, |
| "step": 3470 |
| }, |
| { |
| "epoch": 0.8854961832061069, |
| "grad_norm": 0.21556618809700012, |
| "learning_rate": 5.100367543115635e-06, |
| "loss": 0.0048, |
| "step": 3480 |
| }, |
| { |
| "epoch": 0.8880407124681934, |
| "grad_norm": 0.6794685125350952, |
| "learning_rate": 4.987277353689568e-06, |
| "loss": 0.0106, |
| "step": 3490 |
| }, |
| { |
| "epoch": 0.8905852417302799, |
| "grad_norm": 0.08455541729927063, |
| "learning_rate": 4.8741871642635e-06, |
| "loss": 0.0174, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.8905852417302799, |
| "eval_loss": 0.025014488026499748, |
| "eval_runtime": 133.89, |
| "eval_samples_per_second": 59.616, |
| "eval_steps_per_second": 0.471, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.8931297709923665, |
| "grad_norm": 0.004242707509547472, |
| "learning_rate": 4.761096974837433e-06, |
| "loss": 0.0019, |
| "step": 3510 |
| }, |
| { |
| "epoch": 0.8956743002544529, |
| "grad_norm": 0.24488775432109833, |
| "learning_rate": 4.6480067854113655e-06, |
| "loss": 0.0027, |
| "step": 3520 |
| }, |
| { |
| "epoch": 0.8982188295165394, |
| "grad_norm": 0.3010653853416443, |
| "learning_rate": 4.534916595985299e-06, |
| "loss": 0.005, |
| "step": 3530 |
| }, |
| { |
| "epoch": 0.9007633587786259, |
| "grad_norm": 0.7691232562065125, |
| "learning_rate": 4.4218264065592315e-06, |
| "loss": 0.0083, |
| "step": 3540 |
| }, |
| { |
| "epoch": 0.9033078880407125, |
| "grad_norm": 0.008576878346502781, |
| "learning_rate": 4.308736217133164e-06, |
| "loss": 0.0049, |
| "step": 3550 |
| }, |
| { |
| "epoch": 0.905852417302799, |
| "grad_norm": 0.1735226809978485, |
| "learning_rate": 4.195646027707097e-06, |
| "loss": 0.004, |
| "step": 3560 |
| }, |
| { |
| "epoch": 0.9083969465648855, |
| "grad_norm": 0.005550161004066467, |
| "learning_rate": 4.082555838281029e-06, |
| "loss": 0.0092, |
| "step": 3570 |
| }, |
| { |
| "epoch": 0.910941475826972, |
| "grad_norm": 0.004711980931460857, |
| "learning_rate": 3.969465648854962e-06, |
| "loss": 0.0052, |
| "step": 3580 |
| }, |
| { |
| "epoch": 0.9134860050890585, |
| "grad_norm": 0.33986520767211914, |
| "learning_rate": 3.856375459428895e-06, |
| "loss": 0.0068, |
| "step": 3590 |
| }, |
| { |
| "epoch": 0.916030534351145, |
| "grad_norm": 0.13158228993415833, |
| "learning_rate": 3.7432852700028278e-06, |
| "loss": 0.0035, |
| "step": 3600 |
| }, |
| { |
| "epoch": 0.9185750636132316, |
| "grad_norm": 0.0033058361150324345, |
| "learning_rate": 3.6301950805767603e-06, |
| "loss": 0.0007, |
| "step": 3610 |
| }, |
| { |
| "epoch": 0.9211195928753181, |
| "grad_norm": 0.025659898295998573, |
| "learning_rate": 3.517104891150693e-06, |
| "loss": 0.0087, |
| "step": 3620 |
| }, |
| { |
| "epoch": 0.9236641221374046, |
| "grad_norm": 0.16379065811634064, |
| "learning_rate": 3.4040147017246255e-06, |
| "loss": 0.0084, |
| "step": 3630 |
| }, |
| { |
| "epoch": 0.926208651399491, |
| "grad_norm": 0.03266693651676178, |
| "learning_rate": 3.2909245122985585e-06, |
| "loss": 0.0113, |
| "step": 3640 |
| }, |
| { |
| "epoch": 0.9287531806615776, |
| "grad_norm": 0.02705550380051136, |
| "learning_rate": 3.177834322872491e-06, |
| "loss": 0.005, |
| "step": 3650 |
| }, |
| { |
| "epoch": 0.9312977099236641, |
| "grad_norm": 0.01155170239508152, |
| "learning_rate": 3.064744133446424e-06, |
| "loss": 0.0036, |
| "step": 3660 |
| }, |
| { |
| "epoch": 0.9338422391857506, |
| "grad_norm": 0.01289442554116249, |
| "learning_rate": 2.951653944020356e-06, |
| "loss": 0.0009, |
| "step": 3670 |
| }, |
| { |
| "epoch": 0.9363867684478372, |
| "grad_norm": 0.014147637411952019, |
| "learning_rate": 2.838563754594289e-06, |
| "loss": 0.0031, |
| "step": 3680 |
| }, |
| { |
| "epoch": 0.9389312977099237, |
| "grad_norm": 0.09966015815734863, |
| "learning_rate": 2.7254735651682217e-06, |
| "loss": 0.0046, |
| "step": 3690 |
| }, |
| { |
| "epoch": 0.9414758269720102, |
| "grad_norm": 0.3702790141105652, |
| "learning_rate": 2.6123833757421547e-06, |
| "loss": 0.0097, |
| "step": 3700 |
| }, |
| { |
| "epoch": 0.9440203562340967, |
| "grad_norm": 0.016322093084454536, |
| "learning_rate": 2.4992931863160873e-06, |
| "loss": 0.0029, |
| "step": 3710 |
| }, |
| { |
| "epoch": 0.9465648854961832, |
| "grad_norm": 0.06356921046972275, |
| "learning_rate": 2.38620299689002e-06, |
| "loss": 0.0049, |
| "step": 3720 |
| }, |
| { |
| "epoch": 0.9491094147582697, |
| "grad_norm": 0.002045559696853161, |
| "learning_rate": 2.273112807463953e-06, |
| "loss": 0.0085, |
| "step": 3730 |
| }, |
| { |
| "epoch": 0.9516539440203562, |
| "grad_norm": 0.5753226280212402, |
| "learning_rate": 2.1600226180378854e-06, |
| "loss": 0.0051, |
| "step": 3740 |
| }, |
| { |
| "epoch": 0.9541984732824428, |
| "grad_norm": 0.05781712010502815, |
| "learning_rate": 2.046932428611818e-06, |
| "loss": 0.0017, |
| "step": 3750 |
| }, |
| { |
| "epoch": 0.9567430025445293, |
| "grad_norm": 0.025260968133807182, |
| "learning_rate": 1.933842239185751e-06, |
| "loss": 0.0055, |
| "step": 3760 |
| }, |
| { |
| "epoch": 0.9592875318066157, |
| "grad_norm": 0.04884308576583862, |
| "learning_rate": 1.8207520497596833e-06, |
| "loss": 0.0073, |
| "step": 3770 |
| }, |
| { |
| "epoch": 0.9618320610687023, |
| "grad_norm": 0.34714359045028687, |
| "learning_rate": 1.7076618603336161e-06, |
| "loss": 0.0098, |
| "step": 3780 |
| }, |
| { |
| "epoch": 0.9643765903307888, |
| "grad_norm": 0.17071278393268585, |
| "learning_rate": 1.594571670907549e-06, |
| "loss": 0.0026, |
| "step": 3790 |
| }, |
| { |
| "epoch": 0.9669211195928753, |
| "grad_norm": 0.010923425666987896, |
| "learning_rate": 1.4814814814814815e-06, |
| "loss": 0.005, |
| "step": 3800 |
| }, |
| { |
| "epoch": 0.9694656488549618, |
| "grad_norm": 0.2712092101573944, |
| "learning_rate": 1.3683912920554142e-06, |
| "loss": 0.0096, |
| "step": 3810 |
| }, |
| { |
| "epoch": 0.9720101781170484, |
| "grad_norm": 0.029850659891963005, |
| "learning_rate": 1.2553011026293472e-06, |
| "loss": 0.0031, |
| "step": 3820 |
| }, |
| { |
| "epoch": 0.9745547073791349, |
| "grad_norm": 0.2776340842247009, |
| "learning_rate": 1.1422109132032796e-06, |
| "loss": 0.0168, |
| "step": 3830 |
| }, |
| { |
| "epoch": 0.9770992366412213, |
| "grad_norm": 0.022862764075398445, |
| "learning_rate": 1.0291207237772124e-06, |
| "loss": 0.0151, |
| "step": 3840 |
| }, |
| { |
| "epoch": 0.9796437659033079, |
| "grad_norm": 0.14763417840003967, |
| "learning_rate": 9.160305343511451e-07, |
| "loss": 0.0072, |
| "step": 3850 |
| }, |
| { |
| "epoch": 0.9821882951653944, |
| "grad_norm": 0.0049032000824809074, |
| "learning_rate": 8.029403449250778e-07, |
| "loss": 0.006, |
| "step": 3860 |
| }, |
| { |
| "epoch": 0.9847328244274809, |
| "grad_norm": 0.06708303838968277, |
| "learning_rate": 6.898501554990106e-07, |
| "loss": 0.0015, |
| "step": 3870 |
| }, |
| { |
| "epoch": 0.9872773536895675, |
| "grad_norm": 0.08744917064905167, |
| "learning_rate": 5.767599660729433e-07, |
| "loss": 0.005, |
| "step": 3880 |
| }, |
| { |
| "epoch": 0.989821882951654, |
| "grad_norm": 0.020036060363054276, |
| "learning_rate": 4.636697766468759e-07, |
| "loss": 0.0056, |
| "step": 3890 |
| }, |
| { |
| "epoch": 0.9923664122137404, |
| "grad_norm": 0.005213042721152306, |
| "learning_rate": 3.505795872208086e-07, |
| "loss": 0.0037, |
| "step": 3900 |
| }, |
| { |
| "epoch": 0.9949109414758269, |
| "grad_norm": 0.15998025238513947, |
| "learning_rate": 2.3748939779474134e-07, |
| "loss": 0.0041, |
| "step": 3910 |
| }, |
| { |
| "epoch": 0.9974554707379135, |
| "grad_norm": 0.04085616394877434, |
| "learning_rate": 1.2439920836867402e-07, |
| "loss": 0.001, |
| "step": 3920 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.00030692765722051263, |
| "learning_rate": 1.1309018942606728e-08, |
| "loss": 0.0045, |
| "step": 3930 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 3930, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.273606668827065e+18, |
| "train_batch_size": 128, |
| "trial_name": null, |
| "trial_params": null |
| } |